Dataset Viewer
code
stringlengths 239
50.1k
| apis
sequence | extract_api
stringlengths 246
34.7k
|
---|---|---|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import queue
from enum import Enum
from graphscope.proto import op_def_pb2
from graphscope.proto import types_pb2
class GSEngine(Enum):
    """Identifies which engine a split sub-DAG should be dispatched to."""

    analytical_engine = 0
    interactive_engine = 1
    learning_engine = 2
class DAGManager(object):
    """Split a DagDef into engine-specific sub-DAGs.

    The incoming DAG is walked in submission order; whenever an op belongs
    to one of the per-engine "split" sets, the ops accumulated so far are
    flushed as one sub-DAG and a new sub-DAG is started, tagged with the
    matching engine. Sub-DAGs are consumed FIFO via :meth:`get_next_dag`.
    """

    _analytical_engine_split_op = [
        types_pb2.BIND_APP,  # needs loaded graph to compile
        types_pb2.ADD_LABELS,  # needs loaded graph
        types_pb2.RUN_APP,  # needs loaded app
        types_pb2.CONTEXT_TO_NUMPY,  # needs loaded graph to transform selector
        types_pb2.CONTEXT_TO_DATAFRAME,  # needs loaded graph to transform selector
        types_pb2.GRAPH_TO_NUMPY,  # needs loaded graph to transform selector
        types_pb2.GRAPH_TO_DATAFRAME,  # needs loaded graph to transform selector
        types_pb2.TO_VINEYARD_TENSOR,  # needs loaded graph to transform selector
        types_pb2.TO_VINEYARD_DATAFRAME,  # needs loaded graph to transform selector
        types_pb2.PROJECT_GRAPH,  # needs loaded graph to transform selector
        types_pb2.PROJECT_TO_SIMPLE,  # needs loaded graph schema information
        types_pb2.ADD_COLUMN,  # needs ctx result
        types_pb2.UNLOAD_GRAPH,  # needs loaded graph information
        types_pb2.UNLOAD_APP,  # needs loaded app information
    ]
    _interactive_engine_split_op = [
        types_pb2.CREATE_INTERACTIVE_QUERY,
        types_pb2.SUBGRAPH,
        types_pb2.GREMLIN_QUERY,
        types_pb2.FETCH_GREMLIN_RESULT,
        types_pb2.CLOSE_INTERACTIVE_QUERY,
    ]
    _learning_engine_split_op = [
        types_pb2.CREATE_LEARNING_INSTANCE,
        types_pb2.CLOSE_LEARNING_INSTANCE,
    ]

    def __init__(self, dag_def: op_def_pb2.DagDef):
        self._dag_def = dag_def
        self._split_dag_def_queue = queue.Queue()
        # Table of (split-op set, owning engine), checked in this fixed order.
        split_table = (
            (self._analytical_engine_split_op, GSEngine.analytical_engine),
            (self._interactive_engine_split_op, GSEngine.interactive_engine),
            (self._learning_engine_split_op, GSEngine.learning_engine),
        )
        pending = op_def_pb2.DagDef()
        pending_for = GSEngine.analytical_engine
        for op in self._dag_def.op:
            for split_ops, engine in split_table:
                if op.op in split_ops:
                    # Flush whatever has accumulated, then retag the new
                    # (empty) sub-DAG with this op's engine.
                    if pending.op:
                        self._split_dag_def_queue.put((pending_for, pending))
                        pending = op_def_pb2.DagDef()
                    pending_for = engine
            pending.op.extend([copy.deepcopy(op)])
        # Flush the trailing sub-DAG, if any ops remain.
        if len(pending.op) > 0:
            self._split_dag_def_queue.put((pending_for, pending))

    def empty(self):
        """Return True when no sub-DAG is left to consume."""
        return self._split_dag_def_queue.empty()

    def get_next_dag(self):
        """Pop and return the next ``(GSEngine, DagDef)`` pair, or None."""
        if not self._split_dag_def_queue.empty():
            return self._split_dag_def_queue.get()
        return None
|
[
"queue.Queue",
"graphscope.proto.op_def_pb2.DagDef",
"copy.deepcopy"
] |
[((2396, 2409), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (2407, 2409), False, 'import queue\n'), ((2455, 2474), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (2472, 2474), False, 'from graphscope.proto import op_def_pb2\n'), ((2779, 2798), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (2796, 2798), False, 'from graphscope.proto import op_def_pb2\n'), ((3076, 3095), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (3093, 3095), False, 'from graphscope.proto import op_def_pb2\n'), ((3371, 3390), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (3388, 3390), False, 'from graphscope.proto import op_def_pb2\n'), ((3489, 3506), 'copy.deepcopy', 'copy.deepcopy', (['op'], {}), '(op)\n', (3502, 3506), False, 'import copy\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from graphscope.framework.graph import Graph
from graphscope.framework.loader import Loader
def load_modern_graph(sess, prefix, directed=True):
    """Load modern graph.

    Modern graph consist 6 vertices and 6 edges, useful to test the basic
    functionalities.

    Args:
        sess (:class:`graphscope.Session`): Load graph within the session.
        prefix (str): Data directory.
        directed (bool, optional): Determine to load a directed or undirected graph.
            Defaults to True.

    Returns:
        :class:`graphscope.Graph`: A Graph object which graph type is ArrowProperty
    """
    graph = Graph(sess, directed=directed)
    # Two vertex labels: person and software, keyed by "id".
    graph = graph.add_vertices(
        Loader(os.path.join(prefix, "person.csv"), delimiter="|"),
        "person",
        ["name", ("age", "int")],
        "id",
    )
    graph = graph.add_vertices(
        Loader(os.path.join(prefix, "software.csv"), delimiter="|"),
        "software",
        ["name", "lang"],
        "id",
    )
    # Two edge labels: person-knows-person and person-created-software.
    graph = graph.add_edges(
        Loader(os.path.join(prefix, "knows.csv"), delimiter="|"),
        "knows",
        ["weight"],
        src_label="person",
        dst_label="person",
        src_field="src_id",
        dst_field="dst_id",
    )
    graph = graph.add_edges(
        Loader(os.path.join(prefix, "created.csv"), delimiter="|"),
        "created",
        ["weight"],
        src_label="person",
        dst_label="software",
        src_field="src_id",
        dst_field="dst_id",
    )
    return graph
|
[
"graphscope.framework.graph.Graph",
"os.path.join"
] |
[((1302, 1332), 'graphscope.framework.graph.Graph', 'Graph', (['sess'], {'directed': 'directed'}), '(sess, directed=directed)\n', (1307, 1332), False, 'from graphscope.framework.graph import Graph\n'), ((2024, 2059), 'os.path.join', 'os.path.join', (['prefix', '"""created.csv"""'], {}), "(prefix, 'created.csv')\n", (2036, 2059), False, 'import os\n'), ((1751, 1784), 'os.path.join', 'os.path.join', (['prefix', '"""knows.csv"""'], {}), "(prefix, 'knows.csv')\n", (1763, 1784), False, 'import os\n'), ((1576, 1612), 'os.path.join', 'os.path.join', (['prefix', '"""software.csv"""'], {}), "(prefix, 'software.csv')\n", (1588, 1612), False, 'import os\n'), ((1394, 1428), 'os.path.join', 'os.path.join', (['prefix', '"""person.csv"""'], {}), "(prefix, 'person.csv')\n", (1406, 1428), False, 'import os\n')]
|
import networkx.algorithms.tests.test_link_prediction
import pytest
from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context
# Replay the upstream networkx link-prediction test module against
# graphscope.nx, binding every imported test to the shared session fixture.
import_as_graphscope_nx(networkx.algorithms.tests.test_link_prediction,
                        decorators=pytest.mark.usefixtures("graphscope_session"))
from networkx.algorithms.tests.test_link_prediction import TestAdamicAdarIndex
from networkx.algorithms.tests.test_link_prediction import TestCNSoundarajanHopcroft
from networkx.algorithms.tests.test_link_prediction import TestJaccardCoefficient
from networkx.algorithms.tests.test_link_prediction import TestPreferentialAttachment
from networkx.algorithms.tests.test_link_prediction import \
TestRAIndexSoundarajanHopcroft
from networkx.algorithms.tests.test_link_prediction import TestResourceAllocationIndex
from networkx.algorithms.tests.test_link_prediction import TestWithinInterCluster
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestResourceAllocationIndex)
class TestResourceAllocationIndex:
    def test_notimplemented(self):
        # Directed input must be rejected by the wrapped predictor.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestJaccardCoefficient)
class TestJaccardCoefficient:
    def test_notimplemented(self):
        # Directed input must be rejected by the wrapped predictor.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestPreferentialAttachment)
class TestPreferentialAttachment:
    def test_notimplemented(self):
        # Directed input must be rejected by the wrapped predictor.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestAdamicAdarIndex)
class TestAdamicAdarIndex:
    def test_notimplemented(self):
        # Directed input must be rejected by the wrapped predictor.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestCNSoundarajanHopcroft)
class TestCNSoundarajanHopcroft:
    def test_notimplemented(self):
        # Rejected even when every node carries community data.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        digraph.add_nodes_from([0, 1, 2], community=0)
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestRAIndexSoundarajanHopcroft)
class TestRAIndexSoundarajanHopcroft:
    def test_notimplemented(self):
        # Rejected even when every node carries community data.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        digraph.add_nodes_from([0, 1, 2], community=0)
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestWithinInterCluster)
class TestWithinInterCluster:
    def test_notimplemented(self):
        # Rejected even when every node carries community data.
        digraph = nx.DiGraph([(0, 1), (1, 2)])
        digraph.add_nodes_from([0, 1, 2], community=0)
        with pytest.raises(nx.NetworkXNotImplemented):
            self.func(digraph, [(0, 2)])
|
[
"graphscope.experimental.nx.utils.compat.with_graphscope_nx_context",
"pytest.raises",
"pytest.mark.usefixtures"
] |
[((980, 1025), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1003, 1025), False, 'import pytest\n'), ((1027, 1082), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestResourceAllocationIndex'], {}), '(TestResourceAllocationIndex)\n', (1053, 1082), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1292, 1337), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1315, 1337), False, 'import pytest\n'), ((1339, 1389), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestJaccardCoefficient'], {}), '(TestJaccardCoefficient)\n', (1365, 1389), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1594, 1639), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1617, 1639), False, 'import pytest\n'), ((1641, 1695), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestPreferentialAttachment'], {}), '(TestPreferentialAttachment)\n', (1667, 1695), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1904, 1949), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1927, 1949), False, 'import pytest\n'), ((1951, 1998), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestAdamicAdarIndex'], {}), '(TestAdamicAdarIndex)\n', (1977, 1998), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((2200, 2245), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2223, 2245), 
False, 'import pytest\n'), ((2247, 2300), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestCNSoundarajanHopcroft'], {}), '(TestCNSoundarajanHopcroft)\n', (2273, 2300), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((2542, 2587), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2565, 2587), False, 'import pytest\n'), ((2589, 2647), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestRAIndexSoundarajanHopcroft'], {}), '(TestRAIndexSoundarajanHopcroft)\n', (2615, 2647), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((2894, 2939), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2917, 2939), False, 'import pytest\n'), ((2941, 2991), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestWithinInterCluster'], {}), '(TestWithinInterCluster)\n', (2967, 2991), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((332, 377), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (355, 377), False, 'import pytest\n'), ((2474, 2538), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'self.func', 'G', '[(0, 2)]'], {}), '(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])\n', (2487, 2538), False, 'import pytest\n'), ((2826, 2890), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'self.func', 'G', '[(0, 2)]'], {}), '(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])\n', (2839, 2890), False, 'import pytest\n'), ((3162, 3226), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'self.func', 'G', '[(0, 2)]'], {}), '(nx.NetworkXNotImplemented, 
self.func, G, [(0, 2)])\n', (3175, 3226), False, 'import pytest\n')]
|
import networkx.algorithms.centrality.tests.test_current_flow_betweenness_centrality
import pytest
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context
# Replay the upstream current-flow betweenness centrality tests against
# graphscope.nx, binding every imported test to the shared session fixture.
import_as_graphscope_nx(
    networkx.algorithms.centrality.tests.test_current_flow_betweenness_centrality,
    decorators=pytest.mark.usefixtures("graphscope_session"))
from networkx.algorithms.centrality.tests.test_current_flow_betweenness_centrality import \
TestApproximateFlowBetweennessCentrality
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestApproximateFlowBetweennessCentrality)
class TestApproximateFlowBetweennessCentrality:
    # NB: graphscope.nx does not support grid_graph, pass the test
    def test_grid(self):
        # Deliberate no-op: overrides the upstream test that depends on
        # grid_graph, which graphscope.nx does not provide.
        pass
|
[
"graphscope.nx.utils.compat.with_graphscope_nx_context",
"pytest.mark.usefixtures"
] |
[((542, 587), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (565, 587), False, 'import pytest\n'), ((589, 657), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestApproximateFlowBetweennessCentrality'], {}), '(TestApproximateFlowBetweennessCentrality)\n', (615, 657), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((353, 398), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (376, 398), False, 'import pytest\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import importlib
import logging
import os
import random
import string
import sys
import time
import numpy as np
import pytest
import graphscope
# Forward engine logs to the client, easing debugging of lazy-mode tests.
graphscope.set_option(show_log=True)
from graphscope import property_sssp
from graphscope import sssp
from graphscope.framework.app import AppAssets
from graphscope.framework.app import AppDAGNode
from graphscope.framework.errors import AnalyticalEngineInternalError
from graphscope.framework.errors import InvalidArgumentError
from graphscope.framework.loader import Loader
# Test dataset locations, resolved from the GS_TEST_DIR environment variable.
test_repo_dir = os.path.expandvars("${GS_TEST_DIR}")
prefix = os.path.join(test_repo_dir, "ogbn_mag_small")
new_property_dir = os.path.join(test_repo_dir, "new_property", "v2_e2")
@pytest.fixture(scope="module")
def sess():
    """Yield a lazy-mode GraphScope session shared by the whole module."""
    s = graphscope.session(cluster_type="hosts", num_workers=2, mode="lazy")
    s.as_default()
    yield s
    s.close()
@pytest.fixture(scope="function")
def student_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")):
    """Loader for the student vertex file."""
    return Loader(f"{data_dir}/student.v", header_row=True, delimiter=",")
@pytest.fixture(scope="function")
def teacher_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")):
    """Loader for the teacher vertex file."""
    return Loader(f"{data_dir}/teacher.v", header_row=True, delimiter=",")
@pytest.fixture(scope="function")
def student_group_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")):
    """Loader for the student group edge file."""
    return Loader(f"{data_dir}/group.e", header_row=True, delimiter=",")
@pytest.fixture(scope="function")
def teacher_group_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")):
    """Loader for the teacher group edge file."""
    return Loader(f"{data_dir}/teacher_group.e", header_row=True, delimiter=",")
def arrow_property_graph(graphscope_session):
    """Lazily build the two-label twitter property graph.

    Adds vertex labels v0/v1 and edge labels e0/e1 over every (src, dst)
    label combination, exactly mirroring the on-disk file layout
    ``twitter_e_<src>_<dst>_<elabel>``.
    """
    g = graphscope_session.g(generate_eid=False)
    for vid in (0, 1):
        g = g.add_vertices(f"{new_property_dir}/twitter_v_{vid}", f"v{vid}")
    for eid in (0, 1):
        for src in (0, 1):
            for dst in (0, 1):
                g = g.add_edges(
                    f"{new_property_dir}/twitter_e_{src}_{dst}_{eid}",
                    f"e{eid}",
                    ["weight"],
                    f"v{src}",
                    f"v{dst}",
                )
    return g
def test_vertices_omitted_form_loader(sess, student_group_e):
    """Vertices may be omitted; they are deduced from the edge loader."""
    empty_graph = sess.g()
    with_edges = empty_graph.add_edges(student_group_e)
    materialized = sess.run(with_edges)  # a Graph instance
    assert materialized.loaded()
def test_construct_graph_step_by_step(sess):
    """Each construction step can be materialized eagerly via sess.run."""
    g = sess.run(sess.g(generate_eid=False))
    g1 = sess.run(g.add_vertices(f"{new_property_dir}/twitter_v_0", "v0"))
    g2 = sess.run(g1.add_vertices(f"{new_property_dir}/twitter_v_1", "v1"))
    # Release all three intermediate graphs in one batch.
    sess.run([g.unload(), g1.unload(), g2.unload()])
def test_unload_graph(sess, student_v, teacher_v, student_group_e):
    """Unload graphs at several construction stages; unload is idempotent."""
    # case 1
    # 1. load empty g
    # 2. unload g
    g = sess.g()
    ug = g.unload()
    assert sess.run(ug) is None
    # case 2
    g = sess.g()
    g1 = g.add_vertices(student_v, "student")
    g2 = g.add_vertices(teacher_v, "teacher")
    ug1 = g1.unload()
    ug2 = g2.unload()
    assert sess.run(ug1) is None
    assert sess.run(ug2) is None
    # case 3
    g = sess.g()
    g1 = g.add_vertices(student_v, "student")
    g2 = g1.add_vertices(teacher_v, "teacher")
    g3 = g2.add_edges(
        student_group_e, "group", src_label="student", dst_label="student"
    )
    ug = g.unload()
    ug1 = g1.unload()
    ug2 = g2.unload()
    ug3 = g3.unload()
    sess.run([ug, ug1, ug2, ug3])
    # case 4
    # test unload twice
    g = sess.g()
    ug = g.unload()
    assert sess.run(ug) is None
    assert sess.run(ug) is None
def test_error_using_unload_graph(sess, student_v):
    """Using a graph after its unload op must fail inside the engine."""
    with pytest.raises(AnalyticalEngineInternalError):
        graph = sess.g()
        unload_op = graph.unload()
        appended = graph.add_vertices(student_v, "student")
        sess.run([unload_op, appended])
def test_unload_app(sess):
    """Unload apps; unload is idempotent and the app can be re-loaded after."""
    g = arrow_property_graph(sess)
    # case 1
    a1 = AppDAGNode(g, AppAssets(algo="property_sssp", context="labeled_vertex_data"))
    ua1 = a1.unload()
    assert sess.run(ua1) is None
    # case 2
    # unload app twice
    a1 = AppDAGNode(g, AppAssets(algo="property_sssp", context="labeled_vertex_data"))
    ua1 = a1.unload()
    assert sess.run(ua1) is None
    assert sess.run(ua1) is None
    # case 3
    # load app after unload
    a1 = AppDAGNode(g, AppAssets(algo="property_sssp", context="labeled_vertex_data"))
    ua1 = a1.unload()
    assert sess.run(ua1) is None
    c1 = a1(src=20)
    r1 = c1.to_numpy("r:v0.dist_0")
    r = sess.run(r1)
    assert r.shape == (40521,)
def test_graph_to_numpy(sess):
    """A context column and its copy added onto the graph must agree."""
    g = arrow_property_graph(sess)
    ctx = property_sssp(g, 20)
    from_ctx = ctx.to_numpy("r:v0.dist_0")
    enriched = g.add_column(ctx, {"result_0": "r:v0.dist_0"})
    from_graph = enriched.to_numpy("v:v0.result_0")
    results = sess.run([from_ctx, from_graph])
    assert np.all(results[0] == results[1])
    # unload graph
    sess.run([g.unload(), enriched.unload()])
def test_graph_to_dataframe(sess):
    """A context dataframe and its copy added onto the graph must agree."""
    g = arrow_property_graph(sess)
    ctx = property_sssp(g, 20)
    from_ctx = ctx.to_dataframe({"result": "r:v0.dist_0"})
    enriched = g.add_column(ctx, {"result_0": "r:v0.dist_0"})
    from_graph = enriched.to_dataframe({"result": "v:v0.result_0"})
    results = sess.run([from_ctx, from_graph])
    assert results[0].equals(results[1])
    # unload graph
    sess.run([g.unload(), enriched.unload()])
def test_context(sess):
    """Exercise all context output forms: numpy, dataframe, vineyard tensor/df."""
    g = arrow_property_graph(sess)
    c = property_sssp(g, 20)
    r1 = c.to_numpy("r:v0.dist_0")
    r2 = c.to_dataframe({"id": "v:v0.id", "result": "r:v0.dist_0"})
    r3 = c.to_vineyard_tensor("v:v0.id")
    r4 = c.to_vineyard_dataframe(
        {"id": "v:v0.id", "data": "v:v0.dist", "result": "r:v0.dist_0"}
    )
    r = sess.run([r1, r2, r3, r4])
    assert r[0].shape == (40521,)
    assert r[1].shape == (40521, 2)
    # vineyard outputs are object ids; just check they materialized
    assert r[2] is not None
    assert r[3] is not None
def test_error_selector_context(sess):
    """Malformed selectors raise descriptive errors for both context kinds."""
    # case 1
    # labeled vertex data context
    g = arrow_property_graph(sess)
    c = property_sssp(g, 20)
    with pytest.raises(
        InvalidArgumentError,
        match="Selector in labeled vertex data context cannot be None",
    ):
        r = c.to_numpy(selector=None)
    with pytest.raises(ValueError, match="not enough values to unpack"):
        # missing ":" in selector
        r = c.to_numpy("r.v0.dist_0")
    with pytest.raises(SyntaxError, match="Invalid selector"):
        # must be "v/e/r:xxx"
        r = c.to_numpy("c:v0.dist_0")
    with pytest.raises(SyntaxError, match="Invalid selector"):
        # format error
        c.to_numpy("r:v0.dist_0.dist_1")
    # case 2
    # vertex data context
    pg = g.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]})
    c = sssp(pg, 20)
    with pytest.raises(SyntaxError, match="Selector of v must be 'v.id' or 'v.data'"):
        r = c.to_dataframe({"id": "v.ID"})
    with pytest.raises(ValueError, match="selector of to_dataframe must be a dict"):
        r = c.to_dataframe("id")
def test_query_after_project(sess):
    """Simple-graph sssp works on a projection of a property graph."""
    g = arrow_property_graph(sess)
    # property sssp on property graph
    # sssp on simple graph
    projected = g.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]})
    ctx = sssp(projected, 20)
    result = sess.run(ctx.to_dataframe({"node": "v.id", "r": "r"}))
    assert result.shape == (40521, 2)
def test_add_column(sess):
    """Context columns can be added back onto the originating property graph."""
    g = arrow_property_graph(sess)
    projected = g.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]})
    ctx = sssp(projected, 20)
    enriched = g.add_column(
        ctx, {"id_col": "v.id", "data_col": "v.data", "result_col": "r"}
    )
    sess.run(enriched)
def test_multi_src_dst_edge_loader(
    sess, student_group_e, teacher_group_e, student_v, teacher_v
):
    """One edge label ("group") fed by two loaders with distinct src/dst labels."""
    graph = sess.g()
    graph = graph.add_vertices(
        student_v, "student", ["name", "lesson_nums", "avg_score"], "student_id"
    )
    graph = graph.add_vertices(
        teacher_v, "teacher", ["student_num", "score", "email", "tel"], "teacher_id"
    )
    # student-to-student group edges
    graph = graph.add_edges(
        student_group_e,
        "group",
        ["group_id", "member_size"],
        src_label="student",
        dst_label="student",
        src_field="leader_student_id",
        dst_field="member_student_id",
    )
    # teacher-to-teacher group edges under the same edge label
    graph = graph.add_edges(
        teacher_group_e,
        "group",
        ["group_id", "member_size"],
        src_label="teacher",
        dst_label="teacher",
        src_field="leader_teacher_id",
        dst_field="member_teacher_id",
    )
    g = sess.run(graph)
def test_simulate_eager(sess):
    """Drive the lazy session like an eager one: run every node immediately."""
    g1_node = arrow_property_graph(sess)
    g1 = sess.run(g1_node)
    c_node = property_sssp(g1, 20)
    c = sess.run(c_node)
    r_node = c.to_numpy("r:v0.dist_0")
    r = sess.run(r_node)
    assert r.shape == (40521,)
    pg_node = g1.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]})
    pg = sess.run(pg_node)
    c_node = sssp(pg, 20)
    c = sess.run(c_node)
    g2_node = g1.add_column(
        c, {"id_col": "v.id", "data_col": "v.data", "result_col": "r"}
    )
    g2 = sess.run(g2_node)
|
[
"os.path.expandvars",
"os.path.join",
"graphscope.framework.app.AppAssets",
"graphscope.session",
"graphscope.framework.loader.Loader",
"graphscope.set_option",
"graphscope.property_sssp",
"graphscope.sssp",
"pytest.raises",
"pytest.fixture",
"numpy.all"
] |
[((814, 850), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (835, 850), False, 'import graphscope\n'), ((1207, 1243), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (1225, 1243), False, 'import os\n'), ((1253, 1298), 'os.path.join', 'os.path.join', (['test_repo_dir', '"""ogbn_mag_small"""'], {}), "(test_repo_dir, 'ogbn_mag_small')\n", (1265, 1298), False, 'import os\n'), ((1319, 1371), 'os.path.join', 'os.path.join', (['test_repo_dir', '"""new_property"""', '"""v2_e2"""'], {}), "(test_repo_dir, 'new_property', 'v2_e2')\n", (1331, 1371), False, 'import os\n'), ((1375, 1405), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1389, 1405), False, 'import pytest\n'), ((1567, 1599), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1581, 1599), False, 'import pytest\n'), ((1757, 1789), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1771, 1789), False, 'import pytest\n'), ((1947, 1979), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1961, 1979), False, 'import pytest\n'), ((2141, 2173), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2155, 2173), False, 'import pytest\n'), ((1432, 1500), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(2)', 'mode': '"""lazy"""'}), "(cluster_type='hosts', num_workers=2, mode='lazy')\n", (1450, 1500), False, 'import graphscope\n'), ((1623, 1674), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1641, 1674), False, 'import os\n'), ((1688, 1753), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/student.v' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), 
"('%s/student.v' % data_dir, header_row=True, delimiter=',')\n", (1694, 1753), False, 'from graphscope.framework.loader import Loader\n'), ((1813, 1864), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1831, 1864), False, 'import os\n'), ((1878, 1943), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/teacher.v' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/teacher.v' % data_dir, header_row=True, delimiter=',')\n", (1884, 1943), False, 'from graphscope.framework.loader import Loader\n'), ((2009, 2060), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2027, 2060), False, 'import os\n'), ((2074, 2137), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/group.e' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/group.e' % data_dir, header_row=True, delimiter=',')\n", (2080, 2137), False, 'from graphscope.framework.loader import Loader\n'), ((2203, 2254), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2221, 2254), False, 'import os\n'), ((2268, 2339), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/teacher_group.e' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/teacher_group.e' % data_dir, header_row=True, delimiter=',')\n", (2274, 2339), False, 'from graphscope.framework.loader import Loader\n'), ((5799, 5819), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (5812, 5819), False, 'from graphscope import property_sssp\n'), ((6021, 6041), 'numpy.all', 'np.all', (['(r[0] == r[1])'], {}), '(r[0] == r[1])\n', (6027, 6041), True, 'import numpy as np\n'), ((6207, 6227), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (6220, 6227), False, 'from graphscope import property_sssp\n'), 
((6633, 6653), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (6646, 6653), False, 'from graphscope import property_sssp\n'), ((7202, 7222), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (7215, 7222), False, 'from graphscope import property_sssp\n'), ((7915, 7927), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (7919, 7927), False, 'from graphscope import sssp\n'), ((8392, 8404), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (8396, 8404), False, 'from graphscope import sssp\n'), ((8653, 8665), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (8657, 8665), False, 'from graphscope import sssp\n'), ((9776, 9797), 'graphscope.property_sssp', 'property_sssp', (['g1', '(20)'], {}), '(g1, 20)\n', (9789, 9797), False, 'from graphscope import property_sssp\n'), ((10034, 10046), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (10038, 10046), False, 'from graphscope import sssp\n'), ((4831, 4875), 'pytest.raises', 'pytest.raises', (['AnalyticalEngineInternalError'], {}), '(AnalyticalEngineInternalError)\n', (4844, 4875), False, 'import pytest\n'), ((5100, 5162), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (5109, 5162), False, 'from graphscope.framework.app import AppAssets\n'), ((5279, 5341), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (5288, 5341), False, 'from graphscope.framework.app import AppAssets\n'), ((5496, 5558), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (5505, 5558), False, 'from graphscope.framework.app import 
AppAssets\n'), ((7232, 7336), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""Selector in labeled vertex data context cannot be None"""'}), "(InvalidArgumentError, match=\n 'Selector in labeled vertex data context cannot be None')\n", (7245, 7336), False, 'import pytest\n'), ((7403, 7465), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""not enough values to unpack"""'}), "(ValueError, match='not enough values to unpack')\n", (7416, 7465), False, 'import pytest\n'), ((7548, 7600), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Invalid selector"""'}), "(SyntaxError, match='Invalid selector')\n", (7561, 7600), False, 'import pytest\n'), ((7679, 7731), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Invalid selector"""'}), "(SyntaxError, match='Invalid selector')\n", (7692, 7731), False, 'import pytest\n'), ((7937, 8013), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Selector of v must be \'v.id\' or \'v.data\'"""'}), '(SyntaxError, match="Selector of v must be \'v.id\' or \'v.data\'")\n', (7950, 8013), False, 'import pytest\n'), ((8067, 8141), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""selector of to_dataframe must be a dict"""'}), "(ValueError, match='selector of to_dataframe must be a dict')\n", (8080, 8141), False, 'import pytest\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from enum import Enum
from typing import Any
from typing import NamedTuple
import networkx.readwrite.gml
from graphscope.nx.utils.compat import import_as_graphscope_nx
# Re-register networkx's GML read/write module under graphscope.nx via the
# compat shim, so the tokenizer below extends graphscope's copy of it.
import_as_graphscope_nx(networkx.readwrite.gml)
class Pattern(Enum):
    """Token categories for the GML tokenizer.

    Each member's value is the index of the corresponding token-matching
    pattern inside ``tokenize``.
    """

    KEYS = 0
    REALS = 1
    INTS = 2
    STRINGS = 3
    DICT_START = 4
    DICT_END = 5
    COMMENT_WHITESPACE = 6
class Token(NamedTuple):
    """A single lexical token produced while scanning a GML document."""

    category: Pattern  # which tokenizer pattern matched (see Pattern)
    value: Any  # token payload; concrete type depends on the category
    line: int  # line number in the input where the token appears
    position: int  # character offset of the token on that line
|
[
"graphscope.nx.utils.compat.import_as_graphscope_nx"
] |
[((838, 885), 'graphscope.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.readwrite.gml'], {}), '(networkx.readwrite.gml)\n', (861, 885), False, 'from graphscope.nx.utils.compat import import_as_graphscope_nx\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from itertools import chain
from typing import Any
from typing import Dict
from typing import Mapping
from typing import Sequence
from typing import Tuple
from typing import Union
import numpy as np
import pandas as pd
from graphscope.client.session import get_default_session
from graphscope.framework import dag_utils
from graphscope.framework import utils
from graphscope.framework.errors import InvalidArgumentError
from graphscope.framework.errors import check_argument
from graphscope.framework.graph import Graph
from graphscope.framework.loader import Loader
from graphscope.framework.vineyard_object import VineyardObject
from graphscope.proto import attr_value_pb2
from graphscope.proto import types_pb2
__all__ = ["load_from"]
LoaderVariants = Union[Loader, str, Sequence[np.ndarray], pd.DataFrame, VineyardObject]
class VertexLabel(object):
    """Metadata describing one vertex label of a property graph.

    Tracks the label name, the data loader, the (optional) property list,
    and which column serves as the vertex id.
    """

    def __init__(
        self,
        label: str,
        loader: Any,
        properties: Sequence = None,
        vid: Union[str, int] = 0,
    ):
        self.label = label
        # Wrap raw loader specs (path string, dataframe, ...) in a Loader.
        self.loader = loader if isinstance(loader, Loader) else Loader(loader)
        self.raw_properties = properties
        self.properties = []
        self.vid = vid

    def finish(self, id_type: str = "int64_t"):
        """Normalize the property list; the vid column always comes first."""
        self.add_property(str(self.vid), id_type)
        if self.raw_properties:
            self.add_properties(self.raw_properties)
        elif self.loader.deduced_properties:
            self.add_properties(self.loader.deduced_properties)
        self.loader.select_columns(
            self.properties, include_all=bool(not self.raw_properties)
        )

    def __str__(self) -> str:
        pieces = [
            "",
            "type: VertexLabel",
            "label: " + self.label,
            "properties: " + str(self.properties),
            "vid: " + str(self.vid),
            "loader: " + repr(self.loader),
        ]
        return "\n".join(pieces)

    def __repr__(self) -> str:
        return self.__str__()

    def add_property(self, prop: str, dtype=None) -> None:
        """prop is a str, representing name. It can optionally have a type."""
        self.properties.append((prop, utils.unify_type(dtype)))

    def add_properties(self, properties: Sequence) -> None:
        """Add several properties; each entry is a name or a (name, type) pair."""
        for entry in properties:
            if isinstance(entry, str):
                self.add_property(entry)
            else:
                self.add_property(entry[0], entry[1])
class EdgeSubLabel(object):
    """Hold meta informations of a single relationship.
    i.e. src_label -> edge_label -> dst_label
    """

    def __init__(
        self,
        loader,
        properties=None,
        source=None,
        destination=None,
        load_strategy="both_out_in",
    ):
        """Build one relationship definition.

        Args:
            loader: A ``Loader`` or any raw spec a ``Loader`` accepts.
            properties: Optional sequence of property names (optionally typed).
            source: Source vid column, in any format accepted by
                :meth:`resolve_src_dst_value`.
            destination: Destination vid column, same formats as ``source``.
            load_strategy: One of "only_out", "only_in", "both_out_in".

        Raises:
            SyntaxError: If source and destination vids mix index/name forms.
        """
        if isinstance(loader, Loader):
            self.loader = loader
        else:
            # Wrap raw specs (path string, dataframe, ...) in a Loader.
            self.loader = Loader(loader)
        self.raw_properties = properties
        self.properties = []
        # Defaults: source vid in column 0, destination vid in column 1.
        self.source_vid = 0
        self.source_label = ""
        self.destination_vid = 1
        self.destination_label = ""
        self.load_strategy = ""
        if source is not None:
            self.set_source(source)
        if destination is not None:
            self.set_destination(destination)
        # Vids must both be column indices (int) or both column names (str);
        # mixing the two forms is rejected.
        if (
            isinstance(self.source_vid, int) and isinstance(self.destination_vid, str)
        ) or (
            isinstance(self.source_vid, str) and isinstance(self.destination_vid, int)
        ):
            raise SyntaxError(
                "Source vid and destination vid must have same formats, both use name or both use index"
            )
        self.set_load_strategy(load_strategy)

    def finish(self, id_type: str):
        """Finalize the property list (vid columns first, then declared or
        loader-deduced properties) and narrow the loader to those columns."""
        self.add_property(str(self.source_vid), id_type)
        self.add_property(str(self.destination_vid), id_type)
        if self.raw_properties:
            self.add_properties(self.raw_properties)
        elif self.loader.deduced_properties:
            self.add_properties(self.loader.deduced_properties)
        self.loader.select_columns(
            self.properties, include_all=bool(not self.raw_properties)
        )

    def __str__(self) -> str:
        s = "\ntype: EdgeSubLabel"
        s += "\nsource_label: " + self.source_label
        s += "\ndestination_label: " + self.destination_label
        s += "\nproperties: " + str(self.properties)
        s += "\nloader: " + repr(self.loader)
        return s

    def __repr__(self) -> str:
        return self.__str__()

    @staticmethod
    def resolve_src_dst_value(value: Union[int, str, Tuple[Union[int, str], str]]):
        """Resolve the edge's source and destination.

        Args:
            value (Union[int, str, Tuple[Union[int, str], str]]):
                1. a int, represent vid id. a str, represent vid name
                2. a ([int/str], str). former represents vid, latter represents label

        Raises:
            SyntaxError: If the format is incorrect.
        """
        if isinstance(value, (int, str)):
            # A decimal string would be ambiguous (index vs. name): forbid it.
            check_argument(
                isinstance(value, int)
                or (isinstance(value, str) and not value.isdecimal()),
                "Column name cannot be decimal",
            )
            return value, ""
        elif isinstance(value, Sequence):
            check_argument(len(value) == 2)
            check_argument(
                isinstance(value[0], int)
                or (isinstance(value[0], str) and not value[0].isdecimal()),
                "Column name cannot be decimal",
            )
            check_argument(isinstance(value[1], str), "Label must be str")
            return value[0], value[1]
        else:
            raise InvalidArgumentError(
                "Source / destination format incorrect. Expect vid or [vid, source_label]"
            )

    def set_source(self, source: Union[int, str, Tuple[Union[int, str], str]]):
        """Record the source vid column and (optional) source vertex label."""
        self.source_vid, self.source_label = self.resolve_src_dst_value(source)

    def set_destination(
        self, destination: Union[int, str, Tuple[Union[int, str], str]]
    ):
        """Record the destination vid column and (optional) vertex label."""
        self.destination_vid, self.destination_label = self.resolve_src_dst_value(
            destination
        )

    def set_load_strategy(self, strategy: str):
        """Validate and store the load strategy."""
        check_argument(
            strategy in ("only_out", "only_in", "both_out_in"),
            "invalid load strategy: " + strategy,
        )
        self.load_strategy = strategy

    def add_property(self, prop: str, dtype=None) -> None:
        """prop is a str, representing name. It can optionally have a type."""
        self.properties.append((prop, utils.unify_type(dtype)))

    def add_properties(self, properties: Sequence) -> None:
        """Add several properties; each entry is a name or a (name, type) pair."""
        for prop in properties:
            if isinstance(prop, str):
                self.add_property(prop)
            else:
                self.add_property(prop[0], prop[1])

    def get_attr(self):
        """Serialize this relationship into a ``NameAttrList`` proto.

        Properties 0 and 1 are the vid columns (added by ``finish``) and are
        carried in SRC_VID/DST_VID, hence only ``properties[2:]`` are emitted
        as payload properties.
        """
        attr_list = attr_value_pb2.NameAttrList()
        attr_list.name = "{}_{}".format(self.source_label, self.destination_label)
        attr_list.attr[types_pb2.SRC_LABEL].CopyFrom(utils.s_to_attr(self.source_label))
        attr_list.attr[types_pb2.DST_LABEL].CopyFrom(
            utils.s_to_attr(self.destination_label)
        )
        attr_list.attr[types_pb2.LOAD_STRATEGY].CopyFrom(
            utils.s_to_attr(self.load_strategy)
        )
        attr_list.attr[types_pb2.SRC_VID].CopyFrom(
            utils.s_to_attr(str(self.source_vid))
        )
        attr_list.attr[types_pb2.DST_VID].CopyFrom(
            utils.s_to_attr(str(self.destination_vid))
        )
        attr_list.attr[types_pb2.LOADER].CopyFrom(self.loader.get_attr())
        props = []
        for prop in self.properties[2:]:
            prop_attr = attr_value_pb2.NameAttrList()
            prop_attr.name = prop[0]
            prop_attr.attr[0].CopyFrom(utils.type_to_attr(prop[1]))
            props.append(prop_attr)
        attr_list.attr[types_pb2.PROPERTIES].list.func.extend(props)
        return attr_list
class EdgeLabel(object):
    """Metadata for one edge label, composed of one or more ``EdgeSubLabel``s.

    Each sub-label describes a single relationship that shares this edge
    label, e.g.::

        src_label1 -> edge_label -> dst_label1
        src_label2 -> edge_label -> dst_label2
    """

    def __init__(self, label: str):
        self.label = label
        self.sub_labels = []
        self._finished = False

    def __str__(self):
        pieces = ["\ntype: EdgeLabel", "\nlabel: " + self.label, "\nsub_labels: "]
        pieces.extend("\n" + str(sub) for sub in self.sub_labels)
        return "".join(pieces)

    def __repr__(self):
        return self.__str__()

    def add_sub_label(self, sub_label):
        """Attach one relationship definition to this edge label."""
        self.sub_labels.append(sub_label)

    def finish(self, id_type: str = "int64_t"):
        """Finalize every sub-label with the graph's oid type."""
        for sub in self.sub_labels:
            sub.finish(id_type)
def process_vertex(vertex: VertexLabel) -> attr_value_pb2.NameAttrList:
    """Encode a finished ``VertexLabel`` into a ``NameAttrList`` proto."""
    node = attr_value_pb2.NameAttrList()
    node.name = "vertex"
    node.attr[types_pb2.LABEL].CopyFrom(utils.s_to_attr(vertex.label))
    node.attr[types_pb2.VID].CopyFrom(utils.s_to_attr(str(vertex.vid)))
    # Property 0 is the vid column itself; only the rest are payload.
    prop_attrs = []
    for name, dtype in vertex.properties[1:]:
        entry = attr_value_pb2.NameAttrList()
        entry.name = name
        entry.attr[0].CopyFrom(utils.type_to_attr(dtype))
        prop_attrs.append(entry)
    node.attr[types_pb2.PROPERTIES].list.func.extend(prop_attrs)
    node.attr[types_pb2.LOADER].CopyFrom(vertex.loader.get_attr())
    return node
def process_edge(edge: EdgeLabel) -> attr_value_pb2.NameAttrList:
    """Encode an ``EdgeLabel`` and all of its sub-labels into a proto."""
    node = attr_value_pb2.NameAttrList()
    node.name = "edge"
    node.attr[types_pb2.LABEL].CopyFrom(utils.s_to_attr(edge.label))
    node.attr[types_pb2.SUB_LABEL].list.func.extend(
        sub.get_attr() for sub in edge.sub_labels
    )
    return node
def _sanity_check(edges: Sequence[EdgeLabel], vertices: Sequence[VertexLabel]):
    """Validate edge/vertex label definitions and fill in default labels.

    Ensures every edge sub-label names both endpoints (or neither, in which
    case the single available vertex label is assumed), that referenced
    vertex labels exist, and that source and destination columns differ.
    """
    vertex_labels = [v.label for v in vertices]
    if not vertex_labels:
        vertex_labels.append("_")
    for edge in edges:
        # Check source label and destination label
        check_argument(len(edge.sub_labels) != 0, "Edge label is malformed.")
        for sub_label in edge.sub_labels:
            if sub_label.source_label or sub_label.destination_label:
                if not (sub_label.source_label and sub_label.destination_label):
                    raise RuntimeError(
                        "source label and destination label must be both specified or either unspecified"
                    )
            # Handle default label: when the edge specifies no labels, fall
            # back to the only vertex label available.
            if not sub_label.source_label and not sub_label.destination_label:
                check_argument(len(vertex_labels) == 1, "ambiguous vertex label")
                if len(vertex_labels) == 1:
                    sub_label.source_label = sub_label.destination_label = vertex_labels[0]
            if vertices is not None and len(vertices) > 0:
                check_argument(
                    sub_label.source_label in vertex_labels,
                    "source label not found in vertex labels",
                )
                check_argument(
                    sub_label.destination_label in vertex_labels,
                    "destination label not found in vertex labels",
                )
            check_argument(
                sub_label.source_vid != sub_label.destination_vid,
                "source col and destination col cannot refer to the same col",
            )
    return edges, vertices
def _get_config(
    edges: Sequence[EdgeLabel],
    vertices: Sequence[VertexLabel],
    directed: bool,
    oid_type: str,
    generate_eid: bool,
) -> Dict:
    """Assemble the attribute map for a create-graph request."""
    defn = attr_value_pb2.AttrValue()
    # Finalize labels first: this resolves property lists and id columns.
    for item in chain(edges, vertices):
        item.finish(oid_type)
    for e in edges:
        defn.list.func.extend([process_edge(e)])
    defn.list.func.extend([process_vertex(v) for v in vertices])
    config = {
        types_pb2.ARROW_PROPERTY_DEFINITION: defn,
        types_pb2.DIRECTED: utils.b_to_attr(directed),
        types_pb2.OID_TYPE: utils.s_to_attr(oid_type),
        types_pb2.GENERATE_EID: utils.b_to_attr(generate_eid),
        # vid_type is fixed
        types_pb2.VID_TYPE: utils.s_to_attr("uint64_t"),
        types_pb2.IS_FROM_VINEYARD_ID: utils.b_to_attr(False),
    }
    return config
def normalize_parameter_edges(
    edges: Union[
        Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants
    ]
):
    """Normalize user supplied edge definitions into a list of ``EdgeLabel``.

    Parameters are very flexible: a single loader-like object, a sequence,
    or a mapping from label name to per-label definitions. Edges given
    without a label get the default label "_".

    Args:
        edges: Edge definitions in any of the supported shapes.

    Returns:
        list: One ``EdgeLabel`` per edge label.

    Raises:
        ValueError: If ``edges`` is None.
        SyntaxError: If a label or sub-label definition has an unsupported shape.
    """

    def process_sub_label(items):
        # A bare loader-like object defines a single anonymous relationship.
        if isinstance(items, (Loader, str, pd.DataFrame, VineyardObject)):
            return EdgeSubLabel(items, properties=None, source=None, destination=None)
        elif isinstance(items, Sequence):
            # A sequence of ndarrays is itself one loader spec, not positional args.
            if all([isinstance(item, np.ndarray) for item in items]):
                return EdgeSubLabel(
                    loader=items, properties=None, source=None, destination=None
                )
            else:
                check_argument(len(items) < 6, "Too many arguments for a edge label")
                return EdgeSubLabel(*items)
        elif isinstance(items, Mapping):
            return EdgeSubLabel(**items)
        else:
            raise SyntaxError("Wrong format of e sub label: " + str(items))

    def process_label(label, items):
        e_label = EdgeLabel(label)
        if isinstance(items, (Loader, str, pd.DataFrame, VineyardObject)):
            e_label.add_sub_label(process_sub_label(items))
        elif isinstance(items, Sequence):
            # A sequence either is one sub-label spec (first element loader-like)
            # or a list of several sub-label specs.
            if isinstance(
                items[0], (Loader, str, pd.DataFrame, VineyardObject, np.ndarray)
            ):
                e_label.add_sub_label(process_sub_label(items))
            else:
                for item in items:
                    e_label.add_sub_label(process_sub_label(item))
        elif isinstance(items, Mapping):
            e_label.add_sub_label(process_sub_label(items))
        else:
            raise SyntaxError("Wrong format of e label: " + str(items))
        return e_label

    e_labels = []
    if edges is None:
        # Fixed message: it previously read "Edges should be None", the
        # inverse of what the check actually enforces.
        raise ValueError("Edges should not be None")
    if isinstance(edges, Mapping):
        for label, attr in edges.items():
            e_labels.append(process_label(label, attr))
    else:
        e_labels.append(process_label("_", edges))
    return e_labels
def normalize_parameter_vertices(
    vertices: Union[
        Mapping[str, Union[Sequence, LoaderVariants, Mapping]],
        Tuple,
        LoaderVariants,
        None,
    ]
):
    """Normalize user supplied vertex definitions into a list of ``VertexLabel``.

    Parameters are very flexible: a single loader-like object, a sequence,
    a mapping from label name to per-label definitions, or None.

    Args:
        vertices: Vertex definitions in any of the supported shapes, or None.
    """

    def build_label(name, spec):
        # A bare loader-like object is the whole definition.
        if isinstance(spec, (Loader, str, pd.DataFrame, VineyardObject)):
            return VertexLabel(label=name, loader=spec)
        if isinstance(spec, Sequence):
            # A sequence of ndarrays is itself one loader spec.
            if all(isinstance(entry, np.ndarray) for entry in spec):
                return VertexLabel(label=name, loader=spec)
            check_argument(len(spec) < 4, "Too many arguments for a vertex label")
            return VertexLabel(name, *spec)
        if isinstance(spec, Mapping):
            return VertexLabel(name, **spec)
        raise RuntimeError("Wrong format of v label: " + str(spec))

    if vertices is None:
        return []
    if isinstance(vertices, Mapping):
        return [build_label(name, spec) for name, spec in vertices.items()]
    return [build_label("_", vertices)]
def load_from(
    edges: Union[
        Mapping[str, Union[LoaderVariants, Sequence, Mapping]], LoaderVariants, Sequence
    ],
    vertices: Union[
        Mapping[str, Union[LoaderVariants, Sequence, Mapping]],
        LoaderVariants,
        Sequence,
        None,
    ] = None,
    directed=True,
    oid_type="int64_t",
    generate_eid=True,
) -> Graph:
    """Load a Arrow property graph using a list of vertex/edge specifications.

    - Use Dict of tuples to setup a graph.
        We can use a dict to set vertex and edge configurations,
        which can be used to build graphs.

        Examples:

        .. code:: ipython

            g = graphscope_session.load_from(
                edges={
                    "group": [
                        (
                            "file:///home/admin/group.e",
                            ["group_id", "member_size"],
                            ("leader_student_id", "student"),
                            ("member_student_id", "student"),
                        ),
                        (
                            "file:///home/admin/group_for_teacher_student.e",
                            ["group_id", "group_name", "establish_date"],
                            ("teacher_in_charge_id", "teacher"),
                            ("member_student_id", "student"),
                        ),
                    ]
                },
                vertices={
                    "student": (
                        "file:///home/admin/student.v",
                        ["name", "lesson_nums", "avg_score"],
                        "student_id",
                    ),
                    "teacher": (
                        "file:///home/admin/teacher.v",
                        ["name", "salary", "age"],
                        "teacher_id",
                    ),
                },
            )

        'e' is the label of edges, and 'v' is the label for vertices, edges are
        stored in the 'both_in_out' format; edges with label 'e' linking from
        'v' to 'v'.

    - Use Dict of dict to setup a graph.
        We can also give each element inside the tuple a meaningful name,
        makes it more understandable.

        Examples:

        .. code:: ipython

            g = graphscope_session.load_from(
                edges={
                    "group": [
                        {
                            "loader": "file:///home/admin/group.e",
                            "properties": ["group_id", "member_size"],
                            "source": ("leader_student_id", "student"),
                            "destination": ("member_student_id", "student"),
                        },
                        {
                            "loader": "file:///home/admin/group_for_teacher_student.e",
                            "properties": ["group_id", "group_name", "establish_date"],
                            "source": ("teacher_in_charge_id", "teacher"),
                            "destination": ("member_student_id", "student"),
                        },
                    ]
                },
                vertices={
                    "student": {
                        "loader": "file:///home/admin/student.v",
                        "properties": ["name", "lesson_nums", "avg_score"],
                        "vid": "student_id",
                    },
                    "teacher": {
                        "loader": "file:///home/admin/teacher.v",
                        "properties": ["name", "salary", "age"],
                        "vid": "teacher_id",
                    },
                },
            )

    Args:
        edges: Edge configuration of the graph
        vertices (optional): Vertices configurations of the graph. Defaults to None.
            If None, we assume all edge's src_label and dst_label are deduced and unambiguous.
        directed (bool, optional): Indicate whether the graph
            should be treated as directed or undirected. Defaults to True.
        oid_type (str, optional): ID type of graph. Can be "int64_t" or "string". Defaults to "int64_t".
        generate_eid (bool, optional): Whether to generate a unique edge id for each edge. Generated eid will be placed
            in third column. This feature is for cooperating with interactive engine.
            If you only need to work with analytical engine, set it to False. Defaults to True.
            (Note: the signature default is True; an earlier docstring said False.)
    """
    # Don't import the :code:`nx` in top-level statments to improve the
    # performance of :code:`import graphscope`.
    from graphscope.experimental import nx

    sess = get_default_session()
    if sess is None:
        raise ValueError("No default session found.")
    # A graph-like object is wrapped directly instead of being loaded.
    if isinstance(edges, (Graph, nx.Graph, VineyardObject)):
        return Graph(sess.session_id, edges)
    oid_type = utils.normalize_data_type_str(oid_type)
    # Normalize the flexible user inputs into label lists, then validate them.
    e_labels = normalize_parameter_edges(edges)
    v_labels = normalize_parameter_vertices(vertices)
    e_labels, v_labels = _sanity_check(e_labels, v_labels)
    config = _get_config(e_labels, v_labels, directed, oid_type, generate_eid)
    # Build and run the create-graph operation in the session, then wrap
    # the resulting graph definition in a Graph object.
    op = dag_utils.create_graph(sess.session_id, types_pb2.ARROW_PROPERTY, attrs=config)
    graph_def = sess.run(op)
    graph = Graph(sess.session_id, graph_def)
    return graph


# Module-level shorthand alias for load_from.
g = load_from
|
[
"itertools.chain",
"graphscope.framework.utils.s_to_attr",
"graphscope.client.session.get_default_session",
"graphscope.framework.errors.InvalidArgumentError",
"graphscope.proto.attr_value_pb2.AttrValue",
"graphscope.framework.utils.b_to_attr",
"graphscope.framework.utils.normalize_data_type_str",
"graphscope.framework.utils.type_to_attr",
"graphscope.framework.utils.unify_type",
"graphscope.framework.dag_utils.create_graph",
"graphscope.framework.graph.Graph",
"graphscope.framework.loader.Loader",
"graphscope.framework.errors.check_argument",
"graphscope.proto.attr_value_pb2.NameAttrList"
] |
[((9787, 9816), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (9814, 9816), False, 'from graphscope.proto import attr_value_pb2\n'), ((10479, 10508), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (10506, 10508), False, 'from graphscope.proto import attr_value_pb2\n'), ((12777, 12803), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {}), '()\n', (12801, 12803), False, 'from graphscope.proto import attr_value_pb2\n'), ((12822, 12844), 'itertools.chain', 'chain', (['edges', 'vertices'], {}), '(edges, vertices)\n', (12827, 12844), False, 'from itertools import chain\n'), ((13050, 13075), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['directed'], {}), '(directed)\n', (13065, 13075), False, 'from graphscope.framework import utils\n'), ((13100, 13129), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['generate_eid'], {}), '(generate_eid)\n', (13115, 13129), False, 'from graphscope.framework import utils\n'), ((13265, 13290), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['oid_type'], {}), '(oid_type)\n', (13280, 13290), False, 'from graphscope.framework import utils\n'), ((13403, 13430), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (13418, 13430), False, 'from graphscope.framework import utils\n'), ((13475, 13497), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(False)'], {}), '(False)\n', (13490, 13497), False, 'from graphscope.framework import utils\n'), ((21800, 21821), 'graphscope.client.session.get_default_session', 'get_default_session', ([], {}), '()\n', (21819, 21821), False, 'from graphscope.client.session import get_default_session\n'), ((22018, 22057), 'graphscope.framework.utils.normalize_data_type_str', 'utils.normalize_data_type_str', (['oid_type'], {}), '(oid_type)\n', (22047, 22057), False, 'from 
graphscope.framework import utils\n'), ((22307, 22386), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['sess.session_id', 'types_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(sess.session_id, types_pb2.ARROW_PROPERTY, attrs=config)\n', (22329, 22386), False, 'from graphscope.framework import dag_utils\n'), ((22428, 22461), 'graphscope.framework.graph.Graph', 'Graph', (['sess.session_id', 'graph_def'], {}), '(sess.session_id, graph_def)\n', (22433, 22461), False, 'from graphscope.framework.graph import Graph\n'), ((7026, 7135), 'graphscope.framework.errors.check_argument', 'check_argument', (["(strategy in ('only_out', 'only_in', 'both_out_in'))", "('invalid load strategy: ' + strategy)"], {}), "(strategy in ('only_out', 'only_in', 'both_out_in'), \n 'invalid load strategy: ' + strategy)\n", (7040, 7135), False, 'from graphscope.framework.errors import check_argument\n'), ((7693, 7722), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (7720, 7722), False, 'from graphscope.proto import attr_value_pb2\n'), ((9893, 9922), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex.label'], {}), '(vertex.label)\n', (9908, 9922), False, 'from graphscope.framework import utils\n'), ((10077, 10106), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (10104, 10106), False, 'from graphscope.proto import attr_value_pb2\n'), ((10583, 10610), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['edge.label'], {}), '(edge.label)\n', (10598, 10610), False, 'from graphscope.framework import utils\n'), ((21973, 22002), 'graphscope.framework.graph.Graph', 'Graph', (['sess.session_id', 'edges'], {}), '(sess.session_id, edges)\n', (21978, 22002), False, 'from graphscope.framework.graph import Graph\n'), ((1880, 1894), 'graphscope.framework.loader.Loader', 'Loader', (['loader'], {}), '(loader)\n', (1886, 1894), False, 'from 
graphscope.framework.loader import Loader\n'), ((3646, 3660), 'graphscope.framework.loader.Loader', 'Loader', (['loader'], {}), '(loader)\n', (3652, 3660), False, 'from graphscope.framework.loader import Loader\n'), ((7859, 7893), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.source_label'], {}), '(self.source_label)\n', (7874, 7893), False, 'from graphscope.framework import utils\n'), ((7961, 8000), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.destination_label'], {}), '(self.destination_label)\n', (7976, 8000), False, 'from graphscope.framework import utils\n'), ((8081, 8116), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.load_strategy'], {}), '(self.load_strategy)\n', (8096, 8116), False, 'from graphscope.framework import utils\n'), ((8516, 8545), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (8543, 8545), False, 'from graphscope.proto import attr_value_pb2\n'), ((10175, 10202), 'graphscope.framework.utils.type_to_attr', 'utils.type_to_attr', (['prop[1]'], {}), '(prop[1])\n', (10193, 10202), False, 'from graphscope.framework import utils\n'), ((12384, 12516), 'graphscope.framework.errors.check_argument', 'check_argument', (['(sub_label.source_vid != sub_label.destination_vid)', '"""source col and destination col cannot refer to the same col"""'], {}), "(sub_label.source_vid != sub_label.destination_vid,\n 'source col and destination col cannot refer to the same col')\n", (12398, 12516), False, 'from graphscope.framework.errors import check_argument\n'), ((2962, 2985), 'graphscope.framework.utils.unify_type', 'utils.unify_type', (['dtype'], {}), '(dtype)\n', (2978, 2985), False, 'from graphscope.framework import utils\n'), ((6459, 6560), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Source / destination format incorrect. Expect vid or [vid, source_label]"""'], {}), "(\n 'Source / destination format incorrect. 
Expect vid or [vid, source_label]')\n", (6479, 6560), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((7381, 7404), 'graphscope.framework.utils.unify_type', 'utils.unify_type', (['dtype'], {}), '(dtype)\n', (7397, 7404), False, 'from graphscope.framework import utils\n'), ((8622, 8649), 'graphscope.framework.utils.type_to_attr', 'utils.type_to_attr', (['prop[1]'], {}), '(prop[1])\n', (8640, 8649), False, 'from graphscope.framework import utils\n'), ((12030, 12132), 'graphscope.framework.errors.check_argument', 'check_argument', (['(sub_label.source_label in vertex_labels)', '"""source label not found in vertex labels"""'], {}), "(sub_label.source_label in vertex_labels,\n 'source label not found in vertex labels')\n", (12044, 12132), False, 'from graphscope.framework.errors import check_argument\n'), ((12204, 12316), 'graphscope.framework.errors.check_argument', 'check_argument', (['(sub_label.destination_label in vertex_labels)', '"""destination label not found in vertex labels"""'], {}), "(sub_label.destination_label in vertex_labels,\n 'destination label not found in vertex labels')\n", (12218, 12316), False, 'from graphscope.framework.errors import check_argument\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Manage sessions to the GraphScope coordinator.
"""
import atexit
import base64
import contextlib
import copy
import json
import logging
import os
import random
import sys
import threading
import time
import warnings
from queue import Empty as EmptyQueue
try:
from kubernetes import config as kube_config
except ImportError:
kube_config = None
import graphscope
from graphscope.client.rpc import GRPCClient
from graphscope.client.utils import CaptureKeyboardInterrupt
from graphscope.client.utils import GSLogger
from graphscope.client.utils import set_defaults
from graphscope.config import GSConfig as gs_config
from graphscope.deploy.hosts.cluster import HostsClusterLauncher
from graphscope.deploy.kubernetes.cluster import KubernetesClusterLauncher
from graphscope.framework.errors import ConnectionError
from graphscope.framework.errors import FatalError
from graphscope.framework.errors import GRPCError
from graphscope.framework.errors import InteractiveEngineInternalError
from graphscope.framework.errors import InvalidArgumentError
from graphscope.framework.errors import K8sError
from graphscope.framework.errors import LearningEngineInternalError
from graphscope.framework.errors import check_argument
from graphscope.framework.graph import Graph
from graphscope.framework.operation import Operation
from graphscope.interactive.query import InteractiveQuery
from graphscope.interactive.query import InteractiveQueryStatus
from graphscope.proto import message_pb2
from graphscope.proto import op_def_pb2
from graphscope.proto import types_pb2
# Default location of the session configuration file; the GS_CONFIG_PATH
# environment variable takes precedence when set.
DEFAULT_CONFIG_FILE = os.environ.get(
    "GS_CONFIG_PATH", os.path.expanduser("~/.graphscope/session.json")
)

# Module-level registry of sessions (populated elsewhere in this module).
_session_dict = {}

logger = logging.getLogger("graphscope")
class Session(object):
"""A class for interacting with GraphScope graph computation service cluster.
A :class:`Session` object encapsulates the environment in which :class:`Operation`
objects are executed/evaluated.
A session may own resources. It is important to release these resources when
they are no longer required. To do this, invoke the :meth:`close` method
on the session.
A Session can register itself as default session with :meth:`as_default`, and all operations
after that will use the default session. Session deregister itself as a default session
when closed.
The following example demonstrates its usage:
.. code:: python
>>> import graphscope as gs
>>> # use session object explicitly
>>> sess = gs.session()
>>> g = sess.g()
>>> pg = g.project(vertices={'v': []}, edges={'e': ['dist']})
>>> r = s.sssp(g, 4)
>>> s.close()
>>> # or use a session as default
>>> s = gs.session().as_default()
>>> g = g()
>>> pg = g.project(vertices={'v': []}, edges={'e': ['dist']})
>>> r = gs.sssp(pg, 4)
>>> s.close()
We support setup a service cluster and create a RPC session in following ways:
- GraphScope graph computation service run in cluster managed by kubernetes.
>>> s = graphscope.session()
Also, :class:`Session` provides several keyword params for users to define the cluster.
You may use the param :code:`k8s_gs_image` to specify the image for all engine pod, and
param :code:`k8s_engine_cpu` or :code:`k8s_engine_mem` to specify the resources. More,
you can find all params detail in :meth:`__init__` method.
>>> s = graphscope.session(
... k8s_gs_image="registry.cn-hongkong.aliyuncs.com/graphscope/graphscope:latest",
... k8s_vineyard_cpu=0.1,
... k8s_vineyard_mem="256Mi",
... k8s_vineyard_shared_mem="4Gi",
... k8s_engine_cpu=0.1,
... k8s_engine_mem="256Mi")
- or all params can be provided by a json configuration file or configuration dict.
>>> s = graphscope.session(config='/tmp/config.json')
>>> # Or
>>> s = graphscope.session(config={'k8s_engine_cpu': 5, 'k8s_engine_mem': '5Gi'})
"""
@set_defaults(gs_config)
def __init__(
    self,
    config=None,
    cluster_type=gs_config.cluster_type,
    addr=gs_config.addr,
    num_workers=gs_config.num_workers,
    preemptive=gs_config.preemptive,
    k8s_namespace=gs_config.k8s_namespace,
    k8s_service_type=gs_config.k8s_service_type,
    k8s_gs_image=gs_config.k8s_gs_image,
    k8s_etcd_image=gs_config.k8s_etcd_image,
    k8s_gie_graph_manager_image=gs_config.k8s_gie_graph_manager_image,
    k8s_zookeeper_image=gs_config.k8s_zookeeper_image,
    k8s_image_pull_policy=gs_config.k8s_image_pull_policy,
    k8s_image_pull_secrets=gs_config.k8s_image_pull_secrets,
    k8s_coordinator_cpu=gs_config.k8s_coordinator_cpu,
    k8s_coordinator_mem=gs_config.k8s_coordinator_mem,
    k8s_etcd_num_pods=gs_config.k8s_etcd_num_pods,
    k8s_etcd_cpu=gs_config.k8s_etcd_cpu,
    k8s_etcd_mem=gs_config.k8s_etcd_mem,
    k8s_zookeeper_cpu=gs_config.k8s_zookeeper_cpu,
    k8s_zookeeper_mem=gs_config.k8s_zookeeper_mem,
    k8s_gie_graph_manager_cpu=gs_config.k8s_gie_graph_manager_cpu,
    k8s_gie_graph_manager_mem=gs_config.k8s_gie_graph_manager_mem,
    k8s_vineyard_daemonset=gs_config.k8s_vineyard_daemonset,
    k8s_vineyard_cpu=gs_config.k8s_vineyard_cpu,
    k8s_vineyard_mem=gs_config.k8s_vineyard_mem,
    k8s_vineyard_shared_mem=gs_config.k8s_vineyard_shared_mem,
    k8s_engine_cpu=gs_config.k8s_engine_cpu,
    k8s_engine_mem=gs_config.k8s_engine_mem,
    k8s_mars_worker_cpu=gs_config.mars_worker_cpu,
    k8s_mars_worker_mem=gs_config.mars_worker_mem,
    k8s_mars_scheduler_cpu=gs_config.mars_scheduler_cpu,
    k8s_mars_scheduler_mem=gs_config.mars_scheduler_mem,
    k8s_volumes=gs_config.k8s_volumes,
    k8s_waiting_for_delete=gs_config.k8s_waiting_for_delete,
    timeout_seconds=gs_config.timeout_seconds,
    dangling_timeout_seconds=gs_config.dangling_timeout_seconds,
    with_mars=gs_config.with_mars,
    **kw
):
    """Construct a new GraphScope session.

    Args:
        config (dict or str, optional): The configuration dict or file about how to launch the GraphScope instance.
            For str, it will identify it as a path and read the configuration file to build a
            session if file exist. If not specified, the global default configuration
            :code:`DEFAULT_CONFIG_FILE` will be used, which get value of GS_CONFIG_PATH
            in environment. Note that it will overwrite explicit parameters. Defaults to None.
        addr (str, optional): The endpoint of a pre-launched GraphScope instance with '<ip>:<port>' format.
            A new session id will be generated for each session connection.
        cluster_type (str, optional): Deploy GraphScope instance on hosts or k8s cluster. Defaults to k8s.
            Available options: "k8s" and "hosts". Note that only support deployed on localhost with hosts mode.
        num_workers (int, optional): The number of workers to launch GraphScope engine. Defaults to 2.
        preemptive (bool, optional): If True, GraphScope instance will treat resource params (e.g. k8s_coordinator_cpu)
            as limits and provide the minimum available value as requests, but this will make pod has a `Burstable` QOS,
            which can be preempted by other pods with high QOS. Otherwise, it will set both requests and limits with the
            same value.
        k8s_namespace (str, optional): Contains the namespace to create all resource inside.
            If param missing, it will try to read namespace from kubernetes context, or
            a random namespace will be created and deleted if namespace not exist.
            Defaults to None.
        k8s_service_type (str, optional): Type determines how the GraphScope service is exposed.
            Valid options are NodePort, and LoadBalancer. Defaults to NodePort.
        k8s_gs_image (str, optional): The GraphScope engine's image.
        k8s_etcd_image (str, optional): The image of etcd, which used by vineyard.
        k8s_image_pull_policy (str, optional): Kubernetes image pull policy. Defaults to "IfNotPresent".
        k8s_image_pull_secrets (list[str], optional): A list of secret name used to authorize pull image.
        k8s_gie_graph_manager_image (str, optional): The GraphScope interactive engine's graph manager image.
        k8s_zookeeper_image (str, optional): The image of zookeeper, which used by GIE graph manager.
        k8s_vineyard_daemonset (str, optional): The name of vineyard Helm deployment to use. GraphScope will try to
            discovery the daemonset from kubernetes cluster, then use it if exists, and fallback to launching
            a bundled vineyard container otherwise.
        k8s_vineyard_cpu (float, optional): Minimum number of CPU cores request for vineyard container. Defaults to 0.5.
        k8s_vineyard_mem (str, optional): Minimum number of memory request for vineyard container. Defaults to '512Mi'.
        k8s_vineyard_shared_mem (str, optional): Init size of vineyard shared memory. Defaults to '4Gi'.
        k8s_engine_cpu (float, optional): Minimum number of CPU cores request for engine container. Defaults to 0.5.
        k8s_engine_mem (str, optional): Minimum number of memory request for engine container. Defaults to '4Gi'.
        k8s_coordinator_cpu (float, optional): Minimum number of CPU cores request for coordinator pod. Defaults to 1.0.
        k8s_coordinator_mem (str, optional): Minimum number of memory request for coordinator pod. Defaults to '4Gi'.
        k8s_etcd_num_pods (int, optional): The number of etcd pods. Defaults to 3.
        k8s_etcd_cpu (float, optional): Minimum number of CPU cores request for etcd pod. Defaults to 0.5.
        k8s_etcd_mem (str, optional): Minimum number of memory request for etcd pod. Defaults to '128Mi'.
        k8s_zookeeper_cpu (float, optional):
            Minimum number of CPU cores request for zookeeper container. Defaults to 0.5.
        k8s_zookeeper_mem (str, optional):
            Minimum number of memory request for zookeeper container. Defaults to '256Mi'.
        k8s_gie_graph_manager_cpu (float, optional):
            Minimum number of CPU cores request for graphmanager container. Defaults to 1.0.
        k8s_gie_graph_manager_mem (str, optional):
            Minimum number of memory request for graphmanager container. Defaults to '4Gi'.
        k8s_mars_worker_cpu (float, optional):
            Minimum number of CPU cores request for mars worker container. Defaults to 0.5.
        k8s_mars_worker_mem (str, optional):
            Minimum number of memory request for mars worker container. Defaults to '4Gi'.
        k8s_mars_scheduler_cpu (float, optional):
            Minimum number of CPU cores request for mars scheduler container. Defaults to 0.5.
        k8s_mars_scheduler_mem (str, optional):
            Minimum number of memory request for mars scheduler container. Defaults to '2Gi'.
        with_mars (bool, optional):
            Launch graphscope with mars. Defaults to False.
        k8s_volumes (dict, optional): A dict of k8s volume which represents a directory containing data, accessible to the
            containers in a pod. Defaults to {}.

            For example, you can mount host path with:

            k8s_volumes = {
                "my-data": {
                    "type": "hostPath",
                    "field": {
                        "path": "<path>",
                        "type": "Directory"
                    },
                    "mounts": [
                        {
                            "mountPath": "<path1>"
                        },
                        {
                            "mountPath": "<path2>"
                        }
                    ]
                }
            }

            Or you can mount PVC with:

            k8s_volumes = {
                "my-data": {
                    "type": "persistentVolumeClaim",
                    "field": {
                        "claimName": "your-pvc-name"
                    },
                    "mounts": [
                        {
                            "mountPath": "<path1>"
                        }
                    ]
                }
            }

            Also, you can mount a single volume with:

            k8s_volumes = {
                "my-data": {
                    "type": "hostPath",
                    "field": {xxx},
                    "mounts": {
                        "mountPath": "<path1>"
                    }
                }
            }

        timeout_seconds (int, optional): For waiting service ready (or waiting for delete if
            k8s_waiting_for_delete is True).
        dangling_timeout_seconds (int, optional): After seconds of client disconnect,
            coordinator will kill this graphscope instance. Defaults to 600.
            Expect this value to be greater than 5 (heartbeat interval).
            Disable dangling check by setting -1.
        k8s_waiting_for_delete (bool, optional): Waiting for service delete or not. Defaults to False.
        **kw (dict, optional): Other optional parameters will be put to :code:`**kw`.
            - k8s_minikube_vm_driver: Deprecated.
            - k8s_client_config (dict, optional):
                Provide configurable parameters for connecting to remote k8s,
                which strongly relies on the `kube_config.new_client_from_config` function.
                eg: {"config_file": "~/.kube/config", "context": None, "persist_config": True}
                config_file: Name of the kube-config file.
                context: set the active context. If is set to None, current_context from config file will be used.
                persist_config: If True, config file will be updated when changed(e.g GCP token refresh).
            - log_level: Deprecated.
                Move this param as a global configuration. Set via `graphscope.set_option(log_level='DEBUG')`
            - show_log: Deprecated.
                Move this param as a global configuration.Set via `graphscope.set_option(show_log=True)`

    Raises:
        TypeError: If the given argument combination is invalid and cannot be used to create
            a GraphScope session.
    """
    num_workers = int(num_workers)
    self._config_params = {}
    self._accessable_params = (
        "addr",
        "cluster_type",
        "num_workers",
        "preemptive",
        "k8s_namespace",
        "k8s_service_type",
        "k8s_gs_image",
        "k8s_etcd_image",
        "k8s_image_pull_policy",
        "k8s_image_pull_secrets",
        "k8s_gie_graph_manager_image",
        "k8s_zookeeper_image",
        "k8s_coordinator_cpu",
        "k8s_coordinator_mem",
        "k8s_etcd_num_pods",
        "k8s_etcd_cpu",
        "k8s_etcd_mem",
        "k8s_zookeeper_cpu",
        "k8s_zookeeper_mem",
        "k8s_gie_graph_manager_cpu",
        "k8s_gie_graph_manager_mem",
        "k8s_vineyard_daemonset",
        "k8s_vineyard_cpu",
        "k8s_vineyard_mem",
        "k8s_vineyard_shared_mem",
        "k8s_engine_cpu",
        "k8s_engine_mem",
        "k8s_mars_worker_cpu",
        "k8s_mars_worker_mem",
        "k8s_mars_scheduler_cpu",
        "k8s_mars_scheduler_mem",
        "with_mars",
        "k8s_volumes",
        "k8s_waiting_for_delete",
        "timeout_seconds",
        "dangling_timeout_seconds",
    )
    saved_locals = locals()
    for param in self._accessable_params:
        self._config_params[param] = saved_locals[param]
    # parse config, which should be a path to config file, or dict
    # config has highest priority
    if isinstance(config, dict):
        self._config_params.update(config)
    elif isinstance(config, str):
        self._load_config(config, False)
    elif DEFAULT_CONFIG_FILE:
        self._load_config(DEFAULT_CONFIG_FILE)
    # update other optional params
    self._config_params.update(kw)
    # initial setting of cluster_type
    self._cluster_type = self._parse_cluster_type()
    # mars cannot work with run-on-local mode
    if self._cluster_type == types_pb2.HOSTS and self._config_params["with_mars"]:
        raise NotImplementedError(
            "Mars cluster cannot be launched along with local GraphScope deployment"
        )
    # deprecated params handle
    if "show_log" in kw:
        warnings.warn(
            "The `show_log` parameter has been deprecated and has no effect, "
            "please use `graphscope.set_option(show_log=%s)` instead."
            % kw.pop("show_log", None),
            category=DeprecationWarning,
        )
    if "log_level" in kw:
        warnings.warn(
            "The `log_level` parameter has been deprecated and has no effect, "
            "please use `graphscope.set_option(log_level=%r)` instead."
            # BUGFIX: must pop "log_level" (not "show_log"); otherwise the
            # deprecated key stays in `kw` and trips the "Not recognized
            # value" ValueError below.
            % kw.pop("log_level", None),
            category=DeprecationWarning,
        )
    # update k8s_client_config params
    self._config_params["k8s_client_config"] = kw.pop("k8s_client_config", {})
    # There should be no more custom keyword arguments.
    if kw:
        raise ValueError("Not recognized value: ", list(kw.keys()))
    if self._config_params["addr"]:
        logger.info(
            "Connecting graphscope session with address: %s",
            self._config_params["addr"],
        )
    else:
        logger.info(
            "Initializing graphscope session with parameters: %s",
            self._config_params,
        )
    self._closed = False
    # coordinator service endpoint
    self._coordinator_endpoint = None
    self._launcher = None
    self._heartbeat_sending_thread = None
    self._grpc_client = None
    self._session_id = None  # unique identifier across sessions
    # engine config:
    #
    #   {
    #       "experiment": "ON/OFF",
    #       "vineyard_socket": "...",
    #       "vineyard_rpc_endpoint": "..."
    #   }
    self._engine_config = None
    # interactive instance related graph map
    self._interactive_instance_dict = {}
    # learning engine related graph map
    self._learning_instance_dict = {}
    self._default_session = None
    atexit.register(self.close)
    # create and connect session
    with CaptureKeyboardInterrupt(self.close):
        self._connect()
    self._disconnected = False
    # heartbeat
    self._heartbeat_interval_seconds = 5
    self._heartbeat_sending_thread = threading.Thread(
        target=self._send_heartbeat, args=()
    )
    self._heartbeat_sending_thread.daemon = True
    self._heartbeat_sending_thread.start()
def __repr__(self):
    """Return the string form of :attr:`info` (session resource summary)."""
    info = self.info
    return str(info)
def __str__(self):
    """Delegate to :meth:`__repr__` so both forms render identically."""
    return self.__repr__()
@property
def session_id(self):
    # Unique identifier of this session, assigned by the coordinator
    # during _connect(); None until connected.
    return self._session_id
def _load_config(self, path, slient=True):
    """Merge options from a JSON config file into ``self._config_params``.

    Args:
        path (str): Config file path; ``~`` and environment variables expanded.
        slient (bool): (sic, kept for callers) When True, swallow any
            read/parse error; when False, re-raise it.
    """
    expanded = os.path.expanduser(path)
    config_path = os.path.expandvars(expanded)
    try:
        with open(config_path, "r") as f:
            self._config_params.update(json.load(f))
    except Exception as err:  # noqa
        if not slient:
            raise err
def _parse_cluster_type(self):
    """Map the configured ``addr``/``cluster_type`` to a ``types_pb2`` enum.

    Side effect: in hosts mode, fills in local-run defaults via
    :meth:`_run_on_local`.
    """
    if self._config_params["addr"] is not None:
        # Attaching to an existing coordinator: the real type is
        # discovered after connecting.
        return types_pb2.UNDEFINED
    cluster_type = self._config_params["cluster_type"]
    if cluster_type == "hosts":
        self._run_on_local()
        return types_pb2.HOSTS
    if cluster_type == "k8s":
        return types_pb2.K8S
    raise ValueError("Expect hosts or k8s of cluster_type parameter")
@property
def engine_config(self):
    """Show the engine configuration associated with session in json format.

    Populated from the coordinator's reply in ``_connect``; ``None``
    until the session is connected.
    """
    return self._engine_config
@property
def info(self):
    """Show all resources info associated with session in json format."""
    # Key insertion order matters: __repr__ prints str(self.info).
    if self._closed:
        status = "closed"
    elif self._grpc_client is None or self._disconnected:
        status = "disconnected"
    else:
        status = "active"
    info = {"status": status}
    if self._cluster_type == types_pb2.K8S:
        info["type"] = "k8s"
        info["engine_hosts"] = ",".join(self._pod_name_list)
        info["namespace"] = self._config_params["k8s_namespace"]
    else:
        info["type"] = "hosts"
        info["engine_hosts"] = ",".join(self._config_params["hosts"])
    info["cluster_type"] = str(self._cluster_type)
    info["session_id"] = self.session_id
    info["num_workers"] = self._config_params["num_workers"]
    info["coordinator_endpoint"] = self._coordinator_endpoint
    info["engine_config"] = self._engine_config
    return info
def _send_heartbeat(self):
    """Daemon-thread loop: ping the coordinator until the session closes.

    Flips ``self._disconnected`` so other threads can observe the
    connection state; failures are logged, never raised.
    """
    while not self._closed:
        # Snapshot the client once: close() (running on another thread)
        # sets self._grpc_client to None, and re-reading it between the
        # check and the call could raise AttributeError here.
        client = self._grpc_client
        if client:
            try:
                client.send_heartbeat()
            except GRPCError as exc:
                logger.warning(exc)
                self._disconnected = True
            else:
                self._disconnected = False
        time.sleep(self._heartbeat_interval_seconds)
def close(self):
    """Closes this session.

    This method frees all resources associated with the session:
    the default-session registration, the heartbeat thread, all
    interactive and learning instances, the gRPC channel, and —
    when this process launched the cluster itself — the launcher.
    Safe to call more than once.
    """
    if self._closed:
        return
    self._closed = True
    self._coordinator_endpoint = None
    self._deregister_default()
    # Stop heartbeats first so the background thread does not race
    # with the teardown below.
    if self._heartbeat_sending_thread:
        self._heartbeat_sending_thread.join(
            timeout=self._heartbeat_interval_seconds
        )
        self._heartbeat_sending_thread = None
    self._disconnected = True
    # close all interactive instances
    for instance in self._interactive_instance_dict.values():
        try:
            if instance is not None:
                instance.close()
        except InteractiveEngineInternalError:
            # best-effort: a failing engine must not block teardown
            pass
    self._interactive_instance_dict.clear()
    # close all learning instances
    for instance in self._learning_instance_dict.values():
        try:
            if instance is not None:
                instance.close()
        except LearningEngineInternalError:
            pass
    self._learning_instance_dict.clear()
    if self._grpc_client:
        self._grpc_client.close()
        self._grpc_client = None
    _session_dict.pop(self._session_id, None)
    # clean up
    # Only stop the launcher when this process started it (no external addr).
    if self._config_params["addr"] is None:
        if self._launcher:
            self._launcher.stop()
        self._pod_name_list = []
def _close_interactive_instance(self, instance):
    """Ask the coordinator to tear down *instance*'s interactive engine."""
    object_id = instance.object_id
    if self._grpc_client:
        self._grpc_client.close_interactive_engine(object_id)
        # Keep the key with a None value: gremlin() treats it as closed.
        self._interactive_instance_dict[object_id] = None
def _close_learning_instance(self, instance):
    """Ask the coordinator to tear down *instance*'s learning engine."""
    object_id = instance.object_id
    if self._grpc_client:
        self._grpc_client.close_learning_engine(object_id)
        # Keep the key with a None value: learning() treats it as closed.
        self._learning_instance_dict[object_id] = None
def __del__(self):
    """Finalizer: best-effort :meth:`close`; never raises."""
    # cleanly ignore all exceptions
    try:
        self.close()
    except Exception:  # pylint: disable=broad-except
        pass
def as_default(self):
    """Install this session as the default session.

    This method is used when a Session is constructed, which will immediately
    install self as a default session.

    Raises:
        ValueError: If a default session already exists in current context.

    Returns:
        Session: ``self``, so chained usage such as
        ``s = graphscope.session().as_default()`` (as shown in the class
        docstring) works.
    """
    if not _default_session_stack.is_cleared():
        raise ValueError(
            "A default session is already active. You must explicitly call Session.close()."
        )
    # session context manager; kept so _deregister_default can __exit__ it.
    self._default_session = default_session(self)
    self._default_session.__enter__()
    # BUGFIX: previously returned None, breaking the documented
    # `s = gs.session().as_default()` usage.
    return self
def _deregister_default(self):
    """Remove self from the default session stack."""
    ctx = self._default_session
    if ctx:
        ctx.__exit__(None, None, None)
        self._default_session = None
def run(self, fetch):
    """Run operations of `fetch`.

    Args:
        fetch: :class:`Operation` (or any object exposing an ``op``
            attribute that is one).

    Raises:
        RuntimeError: Client disconnected from the service, or run on a
            closed session.
        ValueError: If fetch is not an instance of :class:`Operation`,
            or the fetch has already been evaluated.
        InvalidArgumentError: Not recognized on output type.

    Returns:
        Different values for different output types of :class:`Operation`.
    """
    # Normalize: unwrap objects that carry their Operation in `.op`.
    if hasattr(fetch, "op"):
        fetch = fetch.op
    if not isinstance(fetch, Operation):
        raise ValueError("Expect a `Operation`")
    if fetch.output is not None:
        raise ValueError("The op <%s> are evaluated duplicated." % fetch.key)
    # convert to list to be compatible with rpc client method signature
    ops = [fetch]
    dag = op_def_pb2.DagDef()
    dag.op.extend([copy.deepcopy(op.as_op_def()) for op in ops])
    if self._closed:
        raise RuntimeError("Attempted to use a closed Session.")
    if not self._grpc_client:
        raise RuntimeError("Session disconnected.")
    # execute the query
    try:
        response = self._grpc_client.run(dag)
    except FatalError:
        self.close()
        raise
    check_argument(
        len(ops) == 1, "Cannot execute multiple ops at the same time"
    )
    return self._parse_value(ops[0], response)
def _parse_value(self, op, response: message_pb2.RunStepResponse):
    """Decode *response* according to the op's declared output type."""
    # Attach run metrics as the op's output, marking it as executed.
    op.set_output(response.metrics)
    output_types = op.output_types
    # A loaded arrow property graph comes back as a graph_def message.
    if output_types == types_pb2.GRAPH:
        return response.graph_def
    if output_types == types_pb2.APP:
        return response.result.decode("utf-8")
    if output_types in (
        types_pb2.RESULTS,
        types_pb2.VINEYARD_TENSOR,
        types_pb2.VINEYARD_DATAFRAME,
    ):
        return response.result.decode("utf-8")
    if output_types in (types_pb2.TENSOR, types_pb2.DATAFRAME):
        return response.result
    raise InvalidArgumentError(
        "Not recognized output type: %s" % output_types
    )
def _connect(self):
    """Launch (or attach to) a coordinator, then open the gRPC session.

    Order matters: launcher start -> gRPC channel -> wait ready ->
    connect/unpack session metadata -> start log fetching. Any failure
    after the channel is opened triggers a full close().
    """
    if self._config_params["addr"] is not None:
        # try connect to exist coordinator
        self._coordinator_endpoint = self._config_params["addr"]
    elif self._cluster_type == types_pb2.K8S:
        if (
            self._config_params["k8s_etcd_image"] is None
            or self._config_params["k8s_gs_image"] is None
        ):
            raise K8sError("None image found.")
        api_client = kube_config.new_client_from_config(
            **self._config_params["k8s_client_config"]
        )
        self._launcher = KubernetesClusterLauncher(
            api_client=api_client,
            namespace=self._config_params["k8s_namespace"],
            service_type=self._config_params["k8s_service_type"],
            num_workers=self._config_params["num_workers"],
            gs_image=self._config_params["k8s_gs_image"],
            preemptive=self._config_params["preemptive"],
            etcd_image=self._config_params["k8s_etcd_image"],
            gie_graph_manager_image=self._config_params[
                "k8s_gie_graph_manager_image"
            ],
            zookeeper_image=self._config_params["k8s_zookeeper_image"],
            image_pull_policy=self._config_params["k8s_image_pull_policy"],
            image_pull_secrets=self._config_params["k8s_image_pull_secrets"],
            vineyard_daemonset=self._config_params["k8s_vineyard_daemonset"],
            vineyard_cpu=self._config_params["k8s_vineyard_cpu"],
            vineyard_mem=self._config_params["k8s_vineyard_mem"],
            vineyard_shared_mem=self._config_params["k8s_vineyard_shared_mem"],
            etcd_num_pods=self._config_params["k8s_etcd_num_pods"],
            etcd_cpu=self._config_params["k8s_etcd_cpu"],
            etcd_mem=self._config_params["k8s_etcd_mem"],
            zookeeper_cpu=self._config_params["k8s_zookeeper_cpu"],
            zookeeper_mem=self._config_params["k8s_zookeeper_mem"],
            gie_graph_manager_cpu=self._config_params["k8s_gie_graph_manager_cpu"],
            gie_graph_manager_mem=self._config_params["k8s_gie_graph_manager_mem"],
            engine_cpu=self._config_params["k8s_engine_cpu"],
            engine_mem=self._config_params["k8s_engine_mem"],
            mars_worker_cpu=self._config_params["k8s_mars_worker_cpu"],
            mars_worker_mem=self._config_params["k8s_mars_worker_mem"],
            mars_scheduler_cpu=self._config_params["k8s_mars_scheduler_cpu"],
            mars_scheduler_mem=self._config_params["k8s_mars_scheduler_mem"],
            with_mars=self._config_params["with_mars"],
            coordinator_cpu=float(self._config_params["k8s_coordinator_cpu"]),
            coordinator_mem=self._config_params["k8s_coordinator_mem"],
            volumes=self._config_params["k8s_volumes"],
            waiting_for_delete=self._config_params["k8s_waiting_for_delete"],
            timeout_seconds=self._config_params["timeout_seconds"],
            dangling_timeout_seconds=self._config_params[
                "dangling_timeout_seconds"
            ],
        )
    elif (
        self._cluster_type == types_pb2.HOSTS
        and isinstance(self._config_params["hosts"], list)
        and len(self._config_params["hosts"]) != 0
        and self._config_params["num_workers"] > 0
    ):
        # launch coordinator with hosts
        self._launcher = HostsClusterLauncher(
            hosts=self._config_params["hosts"],
            port=self._config_params["port"],
            num_workers=self._config_params["num_workers"],
            vineyard_socket=self._config_params["vineyard_socket"],
            timeout_seconds=self._config_params["timeout_seconds"],
        )
    else:
        raise RuntimeError("Session initialize failed.")
    # launching graphscope service
    if self._launcher is not None:
        self._launcher.start()
        self._coordinator_endpoint = self._launcher.coordinator_endpoint
    # waiting service ready
    self._grpc_client = GRPCClient(self._coordinator_endpoint)
    self._grpc_client.waiting_service_ready(
        timeout_seconds=self._config_params["timeout_seconds"],
    )
    # connect and fetch logs from rpc server
    try:
        # The coordinator reports the authoritative cluster type, engine
        # config, pod names, worker count and namespace back to us.
        (
            self._session_id,
            self._cluster_type,
            self._engine_config,
            self._pod_name_list,
            self._config_params["num_workers"],
            self._config_params["k8s_namespace"],
        ) = self._grpc_client.connect(
            cleanup_instance=not bool(self._config_params["addr"]),
            dangling_timeout_seconds=self._config_params[
                "dangling_timeout_seconds"
            ],
        )
        # fetch logs
        if self._config_params["addr"] or self._cluster_type == types_pb2.K8S:
            self._grpc_client.fetch_logs()
        _session_dict[self._session_id] = self
    except Exception:
        self.close()
        raise
def get_config(self):
    """Get configuration of the session.

    Returns:
        dict: The live ``_config_params`` mapping (not a copy).
    """
    return self._config_params
def g(self, incoming_data=None, oid_type="int64", directed=True, generate_eid=True):
    """Construct a :class:`Graph` bound to this session.

    Arguments are forwarded unchanged to the :class:`Graph` constructor.
    """
    return Graph(self, incoming_data, oid_type, directed, generate_eid)
def load_from(self, *args, **kwargs):
    """Load a graph within the session.

    See more information in :meth:`graphscope.load_from`.
    """
    # Temporarily install self as default so the module-level loader
    # binds the resulting graph to this session.
    with default_session(self):
        return graphscope.load_from(*args, **kwargs)
def _run_on_local(self):
    """Fill in host-mode defaults for a purely local deployment."""
    self._config_params.update(
        {"hosts": ["localhost"], "port": None, "vineyard_socket": ""}
    )
def _get_gl_handle(self, graph):
    """Dump a handler for GraphLearn for interaction.

    Fields in :code:`schema` are:

    + the name of node type or edge type
    + whether the graph is weighted graph
    + whether the graph is labeled graph
    + the number of int attributes
    + the number of float attributes
    + the number of string attributes

    An example of the graph handle:

    .. code:: python

        {
            "server": "127.0.0.1:8888,127.0.0.1:8889",
            "client_count": 1,
            "vineyard_socket": "/var/run/vineyard.sock",
            "vineyard_id": 13278328736,
            "node_schema": [
                "user:false:false:10:0:0",
                "item:true:false:0:0:5"
            ],
            "edge_schema": [
                "user:click:item:true:false:0:0:0",
                "user:buy:item:true:true:0:0:0",
                "item:similar:item:false:false:10:0:0"
            ],
            "node_attribute_types": {
                "person": {
                    "age": "i",
                    "name": "s",
                },
            },
            "edge_attribute_types": {
                "knows": {
                    "weight": "f",
                },
            },
        }

    The handle can be decoded using:

    .. code:: python

        base64.b64decode(handle.encode('ascii')).decode('ascii')

    Note that the ports are selected from a range :code:`(8000, 9000)`.

    Args:
        graph (:class:`Graph`): A Property Graph.
        client_number (int): Number of client.

    Returns:
        str: Base64 encoded handle

    Raises:
        InvalidArgumentError: If the graph is not loaded, or graph_type isn't
            `ARROW_PROPERTY`.
    """
    if not graph.loaded():
        raise InvalidArgumentError("The graph has already been unloaded")
    if not graph.graph_type == types_pb2.ARROW_PROPERTY:
        raise InvalidArgumentError("The graph should be a property graph.")

    def group_property_types(props):
        # Classify each property by value type, and detect the special
        # "weight"/"label" property names that mark the graph as
        # weighted/labeled. Counts are encoded as strings in the schema.
        weighted, labeled, i, f, s, attr_types = "false", "false", 0, 0, 0, {}
        for prop in props:
            if prop.type in [types_pb2.STRING]:
                s += 1
                attr_types[prop.name] = "s"
            elif prop.type in (types_pb2.FLOAT, types_pb2.DOUBLE):
                f += 1
                attr_types[prop.name] = "f"
            else:
                # everything else is treated as an int attribute
                i += 1
                attr_types[prop.name] = "i"
            if prop.name == "weight":
                weighted = "true"
            elif prop.name == "label":
                labeled = "true"
        return weighted, labeled, i, f, s, attr_types

    # "<label>:<weighted>:<labeled>:<#int>:<#float>:<#str>" per vertex label
    node_schema, node_attribute_types = [], dict()
    for label in graph.schema.vertex_labels:
        weighted, labeled, i, f, s, attr_types = group_property_types(
            graph.schema.get_vertex_properties(label)
        )
        node_schema.append(
            "{}:{}:{}:{}:{}:{}".format(label, weighted, labeled, i, f, s)
        )
        node_attribute_types[label] = attr_types
    # "<src>:<label>:<dst>:..." per (relationship, edge label) pair
    edge_schema, edge_attribute_types = [], dict()
    for label in graph.schema.edge_labels:
        weighted, labeled, i, f, s, attr_types = group_property_types(
            graph.schema.get_edge_properties(label)
        )
        for rel in graph.schema.get_relationships(label):
            edge_schema.append(
                "{}:{}:{}:{}:{}:{}:{}:{}".format(
                    rel[0], label, rel[1], weighted, labeled, i, f, s
                )
            )
        edge_attribute_types[label] = attr_types
    handle = {
        "hosts": self.info["engine_hosts"],
        "client_count": 1,
        "vineyard_id": graph.vineyard_id,
        "vineyard_socket": self._engine_config["vineyard_socket"],
        "node_schema": node_schema,
        "edge_schema": edge_schema,
        "node_attribute_types": node_attribute_types,
        "edge_attribute_types": edge_attribute_types,
    }
    handle_json_string = json.dumps(handle)
    return base64.b64encode(handle_json_string.encode("utf-8")).decode("utf-8")
@set_defaults(gs_config)
def gremlin(self, graph, engine_params=None):
    """Get a interactive engine handler to execute gremlin queries.

    Note that this method will be executed implicitly when a property graph created
    and cache a instance of InteractiveQuery in session if `initializing_interactive_engine`
    is True. If you want to create a new instance under the same graph by different params,
    you should close the instance first.

    .. code:: python

        >>> # close and recreate InteractiveQuery.
        >>> interactive_query = sess.gremlin(g)
        >>> interactive_query.close()
        >>> interactive_query = sess.gremlin(g, engine_params={"xxx":"xxx"})

    Args:
        graph (:class:`Graph`): Use the graph to create interactive instance.
        engine_params (dict, optional): Configure startup parameters of interactive engine.
            You can also configure this param by `graphscope.set_option(engine_params={})`.
            See a list of configurable keys in
            `interactive_engine/deploy/docker/dockerfile/executor.vineyard.properties`

    Raises:
        InvalidArgumentError: :code:`graph` is not a property graph or unloaded.

    Returns:
        :class:`InteractiveQuery`
    """
    # self._interactive_instance_dict[graph.vineyard_id] will be None if
    # InteractiveQuery closed
    if (
        graph.vineyard_id in self._interactive_instance_dict
        and self._interactive_instance_dict[graph.vineyard_id] is not None
    ):
        # Reuse the cached instance when it is usable (or will be).
        interactive_query = self._interactive_instance_dict[graph.vineyard_id]
        if interactive_query.status == InteractiveQueryStatus.Running:
            return interactive_query
        elif interactive_query.status == InteractiveQueryStatus.Failed:
            raise InteractiveEngineInternalError(interactive_query.error_msg)
        else:
            # Initializing.
            # while True is ok, as the status is either running or failed eventually after timeout.
            while True:
                time.sleep(1)
                if interactive_query.status == InteractiveQueryStatus.Running:
                    return interactive_query
                elif interactive_query.status == InteractiveQueryStatus.Failed:
                    raise InteractiveEngineInternalError(
                        interactive_query.error_msg
                    )
    if not graph.loaded():
        raise InvalidArgumentError("The graph has already been unloaded")
    if not graph.graph_type == types_pb2.ARROW_PROPERTY:
        raise InvalidArgumentError("The graph should be a property graph.")
    # Register the new instance before the RPC so concurrent callers see it.
    interactive_query = InteractiveQuery(session=self, object_id=graph.vineyard_id)
    self._interactive_instance_dict[graph.vineyard_id] = interactive_query
    if engine_params is not None:
        # The coordinator expects string-typed keys and values.
        engine_params = {
            str(key): str(value) for key, value in engine_params.items()
        }
    else:
        engine_params = {}
    try:
        response = self._grpc_client.create_interactive_engine(
            object_id=graph.vineyard_id,
            schema_path=graph.schema_path,
            gremlin_server_cpu=gs_config.k8s_gie_gremlin_server_cpu,
            gremlin_server_mem=gs_config.k8s_gie_gremlin_server_mem,
            engine_params=engine_params,
        )
    except Exception as e:
        # Record the failure on the cached instance so later calls raise
        # instead of hanging in the polling loop above.
        interactive_query.status = InteractiveQueryStatus.Failed
        interactive_query.error_msg = str(e)
        raise InteractiveEngineInternalError(str(e)) from e
    else:
        interactive_query.set_frontend(
            front_ip=response.frontend_host, front_port=response.frontend_port
        )
        interactive_query.status = InteractiveQueryStatus.Running
        graph._attach_interactive_instance(interactive_query)
    return interactive_query
def learning(self, graph, nodes=None, edges=None, gen_labels=None):
    """Start a graph learning engine.

    Args:
        nodes (list): The node types that will be used for gnn training.
        edges (list): The edge types that will be used for gnn training.
        gen_labels (list): Extra node and edge labels on original graph for gnn training.

    Returns:
        `graphscope.learning.Graph`: An instance of `graphscope.learning.Graph`
            that could be feed to the learning engine.
    """
    # Reuse a cached learning instance for this graph, if any.
    if (
        graph.vineyard_id in self._learning_instance_dict
        and self._learning_instance_dict[graph.vineyard_id] is not None
    ):
        return self._learning_instance_dict[graph.vineyard_id]
    if sys.platform != "linux" and sys.platform != "linux2":
        raise RuntimeError(
            "The learning engine currently supports Linux only, doesn't support %s"
            % sys.platform
        )
    if not graph.loaded():
        raise InvalidArgumentError("The graph has already been unloaded")
    if not graph.graph_type == types_pb2.ARROW_PROPERTY:
        raise InvalidArgumentError("The graph should be a property graph.")
    # Imported lazily: graphlearn is an optional, Linux-only dependency.
    from graphscope.learning.graph import Graph as LearningGraph

    handle = self._get_gl_handle(graph)
    config = LearningGraph.preprocess_args(handle, nodes, edges, gen_labels)
    config = base64.b64encode(json.dumps(config).encode("utf-8")).decode("utf-8")
    endpoints = self._grpc_client.create_learning_engine(
        graph.vineyard_id, handle, config
    )
    # Decode the handle, then enrich it with the server endpoints the
    # coordinator just allocated before handing it to the client graph.
    handle = json.loads(base64.b64decode(handle.encode("utf-8")).decode("utf-8"))
    handle["server"] = endpoints
    handle["client_count"] = 1
    learning_graph = LearningGraph(handle, config, graph.vineyard_id, self)
    self._learning_instance_dict[graph.vineyard_id] = learning_graph
    graph._attach_learning_instance(learning_graph)
    return learning_graph
session = Session
def set_option(**kwargs):
    """Set the value of specified options.

    Available options:
        - num_workers
        - log_level
        - show_log
        - k8s_namespace
        - k8s_service_type
        - k8s_gs_image
        - k8s_etcd_image
        - k8s_gie_graph_manager_image
        - k8s_zookeeper_image
        - k8s_image_pull_policy
        - k8s_image_pull_secrets
        - k8s_coordinator_cpu
        - k8s_coordinator_mem
        - k8s_vineyard_daemonset
        - k8s_vineyard_cpu
        - k8s_vineyard_mem
        - k8s_vineyard_shared_mem
        - k8s_engine_cpu
        - k8s_engine_mem
        - k8s_mars_worker_cpu
        - k8s_mars_worker_mem
        - k8s_mars_scheduler_cpu
        - k8s_mars_scheduler_mem
        - with_mars
        - k8s_waiting_for_delete
        - engine_params
        - initializing_interactive_engine
        - timeout_seconds

    Args:
        kwargs: dict
            kv pair of GraphScope config you want to set.

    Raises:
        ValueError: If no such option exists.

    Returns: None
    """
    # Validate every key first, so an unknown option leaves the
    # configuration entirely untouched.
    for key in kwargs:
        if not hasattr(gs_config, key):
            raise ValueError("No such option {} exists.".format(key))
    for key, value in kwargs.items():
        setattr(gs_config, key, value)
    GSLogger.update()
def get_option(key):
    """Get the value of specified option.

    Available options:
        - num_workers
        - log_level
        - show_log
        - k8s_namespace
        - k8s_service_type
        - k8s_gs_image
        - k8s_etcd_image
        - k8s_gie_graph_manager_image
        - k8s_zookeeper_image
        - k8s_image_pull_policy
        - k8s_image_pull_secrets
        - k8s_coordinator_cpu
        - k8s_coordinator_mem
        - k8s_vineyard_daemonset
        - k8s_vineyard_cpu
        - k8s_vineyard_mem
        - k8s_vineyard_shared_mem
        - k8s_engine_cpu
        - k8s_engine_mem
        - k8s_mars_worker_cpu
        - k8s_mars_worker_mem
        - k8s_mars_scheduler_cpu
        - k8s_mars_scheduler_mem
        - with_mars
        - k8s_waiting_for_delete
        - engine_params
        - initializing_interactive_engine
        - timeout_seconds

    Args:
        key: str
            Key of GraphScope config you want to get.

    Raises:
        ValueError: If no such option exists.

    Returns: result: the value of the option
    """
    # Guard clause: reject unknown keys up front.
    if not hasattr(gs_config, key):
        raise ValueError("No such option {} exists.".format(key))
    return getattr(gs_config, key)
def default_session(session):
    """Python's :code:`with` handler for defining a default session.

    Registers *session* so that code executed inside the :code:`with`
    block which asks for a default session is served by it.

    Args:
        session: :class:`Session`
            The session to be installed as the default session.

    Returns:
        A context manager for the default session.
    """
    # Delegate to the module-level stack; the returned context manager
    # pushes the session on entry and pops it on exit.
    return _default_session_stack.get_controller(session)
def get_default_session():
    """Returns the default session for the current context.

    Raises:
        RuntimeError: Default session is not exist.

    Returns:
        The default :class:`Session`.
    """
    # The innermost entry of the stack is the active default session.
    return _default_session_stack.get_default()
def get_session_by_id(handle):
    """Return the session registered under *handle*.

    Raises:
        ValueError: If no session with the given handle exists.
    """
    if handle in _session_dict:
        return _session_dict.get(handle)
    raise ValueError("Session not exists.")
class _DefaultSessionStack(object):
"""A stack of objects for providing implicit defaults."""
def __init__(self):
super().__init__()
self.stack = []
def get_default(self):
if not self.stack:
raise RuntimeError("No default session found.")
return self.stack[-1]
def reset(self):
self.stack = []
def is_cleared(self):
return not self.stack
@contextlib.contextmanager
def get_controller(self, default):
"""A context manager for manipulating a default stack."""
self.stack.append(default)
try:
yield default
finally:
# stack may be empty if reset() was called
if self.stack:
self.stack.remove(default)
# Module-level singleton backing default_session()/get_default_session().
_default_session_stack = _DefaultSessionStack()  # pylint: disable=protected-access
def g(incoming_data=None, oid_type="int64", directed=True, generate_eid=True):
    """Construct a :class:`Graph` within the current default session."""
    sess = get_default_session()
    return sess.g(incoming_data, oid_type, directed, generate_eid)
|
[
"logging.getLogger",
"graphscope.client.rpc.GRPCClient",
"time.sleep",
"graphscope.interactive.query.InteractiveQuery",
"graphscope.proto.op_def_pb2.DagDef",
"graphscope.framework.errors.InvalidArgumentError",
"json.dumps",
"atexit.register",
"os.path.expanduser",
"graphscope.deploy.hosts.cluster.HostsClusterLauncher",
"graphscope.client.utils.CaptureKeyboardInterrupt",
"graphscope.framework.errors.K8sError",
"graphscope.client.utils.set_defaults",
"graphscope.load_from",
"graphscope.client.utils.GSLogger.update",
"graphscope.framework.errors.InteractiveEngineInternalError",
"graphscope.learning.graph.Graph.preprocess_args",
"kubernetes.config.new_client_from_config",
"graphscope.framework.graph.Graph",
"graphscope.learning.graph.Graph",
"json.load",
"threading.Thread"
] |
[((2378, 2409), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (2395, 2409), False, 'import logging\n'), ((2297, 2345), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.graphscope/session.json"""'], {}), "('~/.graphscope/session.json')\n", (2315, 2345), False, 'import os\n'), ((4779, 4802), 'graphscope.client.utils.set_defaults', 'set_defaults', (['gs_config'], {}), '(gs_config)\n', (4791, 4802), False, 'from graphscope.client.utils import set_defaults\n'), ((39313, 39336), 'graphscope.client.utils.set_defaults', 'set_defaults', (['gs_config'], {}), '(gs_config)\n', (39325, 39336), False, 'from graphscope.client.utils import set_defaults\n'), ((46774, 46791), 'graphscope.client.utils.GSLogger.update', 'GSLogger.update', ([], {}), '()\n', (46789, 46791), False, 'from graphscope.client.utils import GSLogger\n'), ((19998, 20025), 'atexit.register', 'atexit.register', (['self.close'], {}), '(self.close)\n', (20013, 20025), False, 'import atexit\n'), ((20285, 20339), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._send_heartbeat', 'args': '()'}), '(target=self._send_heartbeat, args=())\n', (20301, 20339), False, 'import threading\n'), ((27310, 27329), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (27327, 27329), False, 'from graphscope.proto import op_def_pb2\n'), ((33098, 33136), 'graphscope.client.rpc.GRPCClient', 'GRPCClient', (['self._coordinator_endpoint'], {}), '(self._coordinator_endpoint)\n', (33108, 33136), False, 'from graphscope.client.rpc import GRPCClient\n'), ((34338, 34398), 'graphscope.framework.graph.Graph', 'Graph', (['self', 'incoming_data', 'oid_type', 'directed', 'generate_eid'], {}), '(self, incoming_data, oid_type, directed, generate_eid)\n', (34343, 34398), False, 'from graphscope.framework.graph import Graph\n'), ((39204, 39222), 'json.dumps', 'json.dumps', (['handle'], {}), '(handle)\n', (39214, 39222), False, 'import json\n'), ((42138, 42197), 
'graphscope.interactive.query.InteractiveQuery', 'InteractiveQuery', ([], {'session': 'self', 'object_id': 'graph.vineyard_id'}), '(session=self, object_id=graph.vineyard_id)\n', (42154, 42197), False, 'from graphscope.interactive.query import InteractiveQuery\n'), ((44793, 44856), 'graphscope.learning.graph.Graph.preprocess_args', 'LearningGraph.preprocess_args', (['handle', 'nodes', 'edges', 'gen_labels'], {}), '(handle, nodes, edges, gen_labels)\n', (44822, 44856), True, 'from graphscope.learning.graph import Graph as LearningGraph\n'), ((45246, 45300), 'graphscope.learning.graph.Graph', 'LearningGraph', (['handle', 'config', 'graph.vineyard_id', 'self'], {}), '(handle, config, graph.vineyard_id, self)\n', (45259, 45300), True, 'from graphscope.learning.graph import Graph as LearningGraph\n'), ((20076, 20112), 'graphscope.client.utils.CaptureKeyboardInterrupt', 'CaptureKeyboardInterrupt', (['self.close'], {}), '(self.close)\n', (20100, 20112), False, 'from graphscope.client.utils import CaptureKeyboardInterrupt\n'), ((20729, 20753), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (20747, 20753), False, 'import os\n'), ((23064, 23108), 'time.sleep', 'time.sleep', (['self._heartbeat_interval_seconds'], {}), '(self._heartbeat_interval_seconds)\n', (23074, 23108), False, 'import time\n'), ((28718, 28790), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('Not recognized output type: %s' % op.output_types)"], {}), "('Not recognized output type: %s' % op.output_types)\n", (28738, 28790), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((34615, 34652), 'graphscope.load_from', 'graphscope.load_from', (['*args'], {}), '(*args, **kwargs)\n', (34635, 34652), False, 'import graphscope\n'), ((36812, 36871), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph has already been unloaded"""'], {}), "('The graph has already been unloaded')\n", (36832, 36871), 
False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((36951, 37012), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph should be a property graph."""'], {}), "('The graph should be a property graph.')\n", (36971, 37012), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((41908, 41967), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph has already been unloaded"""'], {}), "('The graph has already been unloaded')\n", (41928, 41967), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((42047, 42108), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph should be a property graph."""'], {}), "('The graph should be a property graph.')\n", (42067, 42108), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((44460, 44519), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph has already been unloaded"""'], {}), "('The graph has already been unloaded')\n", (44480, 44519), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((44599, 44660), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph should be a property graph."""'], {}), "('The graph should be a property graph.')\n", (44619, 44660), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((20837, 20849), 'json.load', 'json.load', (['f'], {}), '(f)\n', (20846, 20849), False, 'import json\n'), ((29298, 29376), 'kubernetes.config.new_client_from_config', 'kube_config.new_client_from_config', ([], {}), "(**self._config_params['k8s_client_config'])\n", (29332, 29376), True, 'from kubernetes import config as kube_config\n'), ((29243, 29272), 'graphscope.framework.errors.K8sError', 'K8sError', (['"""None image found."""'], {}), "('None image found.')\n", (29251, 29272), False, 'from 
graphscope.framework.errors import K8sError\n'), ((32425, 32689), 'graphscope.deploy.hosts.cluster.HostsClusterLauncher', 'HostsClusterLauncher', ([], {'hosts': "self._config_params['hosts']", 'port': "self._config_params['port']", 'num_workers': "self._config_params['num_workers']", 'vineyard_socket': "self._config_params['vineyard_socket']", 'timeout_seconds': "self._config_params['timeout_seconds']"}), "(hosts=self._config_params['hosts'], port=self.\n _config_params['port'], num_workers=self._config_params['num_workers'],\n vineyard_socket=self._config_params['vineyard_socket'], timeout_seconds\n =self._config_params['timeout_seconds'])\n", (32445, 32689), False, 'from graphscope.deploy.hosts.cluster import HostsClusterLauncher\n'), ((41222, 41281), 'graphscope.framework.errors.InteractiveEngineInternalError', 'InteractiveEngineInternalError', (['interactive_query.error_msg'], {}), '(interactive_query.error_msg)\n', (41252, 41281), False, 'from graphscope.framework.errors import InteractiveEngineInternalError\n'), ((41484, 41497), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41494, 41497), False, 'import time\n'), ((44891, 44909), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (44901, 44909), False, 'import json\n'), ((41744, 41803), 'graphscope.framework.errors.InteractiveEngineInternalError', 'InteractiveEngineInternalError', (['interactive_query.error_msg'], {}), '(interactive_query.error_msg)\n', (41774, 41803), False, 'from graphscope.framework.errors import InteractiveEngineInternalError\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is referred and derived from project NetworkX,
#
# https://github.com/networkx/networkx/blob/master/networkx/readwrite/adjlist.py
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
from numbers import Number
# fmt: off
from networkx.drawing.nx_pylab import draw as _draw
from networkx.drawing.nx_pylab import draw_networkx as _draw_networkx
from networkx.drawing.nx_pylab import \
draw_networkx_edge_labels as _draw_networkx_edge_labels
from networkx.drawing.nx_pylab import draw_networkx_edges as _draw_networkx_edges
from networkx.drawing.nx_pylab import draw_networkx_labels as _draw_networkx_labels
from networkx.drawing.nx_pylab import draw_networkx_nodes as _draw_networkx_nodes
from graphscope import nx
from graphscope.nx.drawing.layout import circular_layout
from graphscope.nx.drawing.layout import kamada_kawai_layout
from graphscope.nx.drawing.layout import planar_layout
from graphscope.nx.drawing.layout import random_layout
from graphscope.nx.drawing.layout import shell_layout
from graphscope.nx.drawing.layout import spectral_layout
from graphscope.nx.drawing.layout import spring_layout
from graphscope.nx.utils.compat import with_graphscope_nx_context
# fmt: on
# Public drawing API re-exported by this module.
__all__ = [
    "draw",
    "draw_networkx",
    "draw_networkx_nodes",
    "draw_networkx_edges",
    "draw_networkx_labels",
    "draw_networkx_edge_labels",
    "draw_circular",
    "draw_kamada_kawai",
    "draw_random",
    "draw_spectral",
    "draw_spring",
    "draw_planar",
    "draw_shell",
]
def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None):
    """Apply an alpha (or list of alphas) to the colors provided.

    Parameters
    ----------
    colors : color string, or array of floats
        Color of element. Can be a single color format string (default='r'),
        or a sequence of colors with the same length as nodelist.
        If numeric values are specified they will be mapped to
        colors using the cmap and vmin,vmax parameters. See
        matplotlib.scatter for more details.
    alpha : float or array of floats
        Alpha values for elements. This can be a single alpha value, in
        which case it will be applied to all the elements of color. Otherwise,
        if it is an array, the elements of alpha will be applied to the colors
        in order (cycling through alpha multiple times if necessary).
    elem_list : array of networkx objects
        The list of elements which are being colored. These could be nodes,
        edges or labels.
    cmap : matplotlib colormap
        Color map for use if colors is a list of floats corresponding to points
        on a color mapping.
    vmin, vmax : float
        Minimum and maximum values for normalizing colors if a color mapping is
        used.

    Returns
    -------
    rgba_colors : numpy ndarray
        Array containing RGBA format values for each of the node colours.
    """
    from itertools import cycle
    from itertools import islice

    # matplotlib is an optional dependency; import lazily and fail with a
    # clear message if it is missing.
    try:
        import matplotlib.cm as cm
        import numpy as np
        from matplotlib.colors import colorConverter
    except ImportError as e:
        raise ImportError("Matplotlib required for draw()") from e

    # If we have been provided with a list of numbers as long as elem_list,
    # apply the color mapping.
    if len(colors) == len(elem_list) and isinstance(colors[0], Number):
        mapper = cm.ScalarMappable(cmap=cmap)
        mapper.set_clim(vmin, vmax)
        rgba_colors = mapper.to_rgba(colors)
    # Otherwise, convert colors to matplotlib's RGB using the colorConverter
    # object.  These are converted to numpy ndarrays to be consistent with the
    # to_rgba method of ScalarMappable.
    else:
        try:
            # Single color specification (e.g. a format string).
            rgba_colors = np.array([colorConverter.to_rgba(colors)])
        except ValueError:
            # A sequence of individual color specifications.
            rgba_colors = np.array([colorConverter.to_rgba(color) for color in colors])
    # Set the final column of the rgba_colors to have the relevant alpha values
    try:
        # If alpha is longer than the number of colors, resize to the number of
        # elements.  Also, if rgba_colors.size (the number of elements of
        # rgba_colors) is the same as the number of elements, resize the array,
        # to avoid it being interpreted as a colormap by scatter()
        if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list):
            rgba_colors = np.resize(rgba_colors, (len(elem_list), 4))
            rgba_colors[1:, 0] = rgba_colors[0, 0]
            rgba_colors[1:, 1] = rgba_colors[0, 1]
            rgba_colors[1:, 2] = rgba_colors[0, 2]
        rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors)))
    except TypeError:
        # len(alpha) raised TypeError: alpha is a scalar, broadcast it to
        # every color's alpha channel.
        rgba_colors[:, -1] = alpha
    return rgba_colors
@with_graphscope_nx_context(_draw_networkx_nodes)
def draw_networkx_nodes(
    G,
    pos,
    nodelist=None,
    node_size=300,
    node_color="#1f78b4",
    node_shape="o",
    alpha=None,
    cmap=None,
    vmin=None,
    vmax=None,
    ax=None,
    linewidths=None,
    edgecolors=None,
    label=None,
):
    """Draw the nodes of graph G.

    Signature-only stub: the implementation is supplied by the
    ``with_graphscope_nx_context`` decorator, which adapts networkx's
    ``draw_networkx_nodes`` to graphscope graphs.
    """
    pass
@with_graphscope_nx_context(_draw_networkx_edges)
def draw_networkx_edges(
    G,
    pos,
    edgelist=None,
    width=1.0,
    edge_color="k",
    style="solid",
    alpha=None,
    arrowstyle="-|>",
    arrowsize=10,
    edge_cmap=None,
    edge_vmin=None,
    edge_vmax=None,
    ax=None,
    arrows=True,
    label=None,
    node_size=300,
    nodelist=None,
    node_shape="o",
    connectionstyle=None,
    min_source_margin=0,
    min_target_margin=0,
):
    """Draw the edges of graph G.

    Signature-only stub: the implementation is supplied by the
    ``with_graphscope_nx_context`` decorator, which adapts networkx's
    ``draw_networkx_edges`` to graphscope graphs.
    """
    pass
@with_graphscope_nx_context(_draw_networkx_labels)
def draw_networkx_labels(
    G,
    pos,
    labels=None,
    font_size=12,
    font_color="k",
    font_family="sans-serif",
    font_weight="normal",
    alpha=None,
    bbox=None,
    horizontalalignment="center",
    verticalalignment="center",
    ax=None,
):
    """Draw node labels on graph G.

    Signature-only stub: the implementation is supplied by the
    ``with_graphscope_nx_context`` decorator, which adapts networkx's
    ``draw_networkx_labels`` to graphscope graphs.
    """
    pass
@with_graphscope_nx_context(_draw)
def draw(G, pos=None, ax=None, **kwds):
    """Draw graph G with matplotlib.

    Signature-only stub: the implementation is supplied by the
    ``with_graphscope_nx_context`` decorator, which adapts networkx's
    ``draw`` to graphscope graphs.
    """
    pass
@with_graphscope_nx_context(_draw_networkx)
def draw_networkx(G, pos=None, arrows=True, with_labels=True, **kwds):
    """Draw graph G using matplotlib, with options for node/edge styling.

    Signature-only stub: the implementation is supplied by the
    ``with_graphscope_nx_context`` decorator, which adapts networkx's
    ``draw_networkx`` to graphscope graphs.
    """
    pass
@with_graphscope_nx_context(_draw_networkx_edge_labels)
def draw_networkx_edge_labels(
    G,
    pos,
    edge_labels=None,
    label_pos=0.5,
    font_size=10,
    font_color="k",
    font_family="sans-serif",
    font_weight="normal",
    alpha=None,
    bbox=None,
    horizontalalignment="center",
    verticalalignment="center",
    ax=None,
    rotate=True,
):
    """Draw edge labels on graph G.

    Signature-only stub: the implementation is supplied by the
    ``with_graphscope_nx_context`` decorator, which adapts networkx's
    ``draw_networkx_edge_labels`` to graphscope graphs.
    """
    pass
def draw_circular(G, **kwargs):
    """Draw graph G with a circular layout."""
    pos = circular_layout(G)
    draw(G, pos, **kwargs)
def draw_kamada_kawai(G, **kwargs):
    """Draw graph G with a Kamada-Kawai force-directed layout."""
    pos = kamada_kawai_layout(G)
    draw(G, pos, **kwargs)
def draw_random(G, **kwargs):
    """Draw graph G with a random layout."""
    pos = random_layout(G)
    draw(G, pos, **kwargs)
def draw_spectral(G, **kwargs):
    """Draw graph G with a spectral 2D layout."""
    pos = spectral_layout(G)
    draw(G, pos, **kwargs)
def draw_spring(G, **kwargs):
    """Draw graph G with a spring (force-directed) layout."""
    pos = spring_layout(G)
    draw(G, pos, **kwargs)
def draw_shell(G, **kwargs):
    """Draw networkx graph G with a shell layout.

    The optional ``nlist`` keyword argument (a list of node lists, one per
    shell) is consumed here and forwarded to :func:`shell_layout`; every
    other keyword argument is passed through to :func:`draw`.
    """
    # pop() both reads and removes nlist, replacing the original
    # get()-then-del pair, so it is not forwarded to draw() as well.
    nlist = kwargs.pop("nlist", None)
    draw(G, shell_layout(G, nlist=nlist), **kwargs)
def draw_planar(G, **kwargs):
    """Draw a planar graph G with a planar (non-crossing) layout."""
    pos = planar_layout(G)
    draw(G, pos, **kwargs)
|
[
"itertools.cycle",
"graphscope.nx.drawing.layout.circular_layout",
"graphscope.nx.drawing.layout.spectral_layout",
"graphscope.nx.drawing.layout.kamada_kawai_layout",
"graphscope.nx.drawing.layout.spring_layout",
"graphscope.nx.drawing.layout.planar_layout",
"matplotlib.colors.colorConverter.to_rgba",
"graphscope.nx.utils.compat.with_graphscope_nx_context",
"matplotlib.cm.ScalarMappable",
"graphscope.nx.drawing.layout.random_layout",
"graphscope.nx.drawing.layout.shell_layout"
] |
[((5044, 5092), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_nodes'], {}), '(_draw_networkx_nodes)\n', (5070, 5092), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((5365, 5413), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_edges'], {}), '(_draw_networkx_edges)\n', (5391, 5413), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((5839, 5888), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_labels'], {}), '(_draw_networkx_labels)\n', (5865, 5888), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((6167, 6200), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw'], {}), '(_draw)\n', (6193, 6200), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((6253, 6295), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx'], {}), '(_draw_networkx)\n', (6279, 6295), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((6379, 6433), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_edge_labels'], {}), '(_draw_networkx_edge_labels)\n', (6405, 6433), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((3682, 3710), 'matplotlib.cm.ScalarMappable', 'cm.ScalarMappable', ([], {'cmap': 'cmap'}), '(cmap=cmap)\n', (3699, 3710), True, 'import matplotlib.cm as cm\n'), ((6801, 6819), 'graphscope.nx.drawing.layout.circular_layout', 'circular_layout', (['G'], {}), '(G)\n', (6816, 6819), False, 'from graphscope.nx.drawing.layout import circular_layout\n'), ((6881, 6903), 'graphscope.nx.drawing.layout.kamada_kawai_layout', 'kamada_kawai_layout', (['G'], {}), '(G)\n', (6900, 6903), False, 'from 
graphscope.nx.drawing.layout import kamada_kawai_layout\n'), ((6959, 6975), 'graphscope.nx.drawing.layout.random_layout', 'random_layout', (['G'], {}), '(G)\n', (6972, 6975), False, 'from graphscope.nx.drawing.layout import random_layout\n'), ((7033, 7051), 'graphscope.nx.drawing.layout.spectral_layout', 'spectral_layout', (['G'], {}), '(G)\n', (7048, 7051), False, 'from graphscope.nx.drawing.layout import spectral_layout\n'), ((7107, 7123), 'graphscope.nx.drawing.layout.spring_layout', 'spring_layout', (['G'], {}), '(G)\n', (7120, 7123), False, 'from graphscope.nx.drawing.layout import spring_layout\n'), ((7270, 7298), 'graphscope.nx.drawing.layout.shell_layout', 'shell_layout', (['G'], {'nlist': 'nlist'}), '(G, nlist=nlist)\n', (7282, 7298), False, 'from graphscope.nx.drawing.layout import shell_layout\n'), ((7354, 7370), 'graphscope.nx.drawing.layout.planar_layout', 'planar_layout', (['G'], {}), '(G)\n', (7367, 7370), False, 'from graphscope.nx.drawing.layout import planar_layout\n'), ((4928, 4940), 'itertools.cycle', 'cycle', (['alpha'], {}), '(alpha)\n', (4933, 4940), False, 'from itertools import cycle\n'), ((4047, 4077), 'matplotlib.colors.colorConverter.to_rgba', 'colorConverter.to_rgba', (['colors'], {}), '(colors)\n', (4069, 4077), False, 'from matplotlib.colors import colorConverter\n'), ((4143, 4172), 'matplotlib.colors.colorConverter.to_rgba', 'colorConverter.to_rgba', (['color'], {}), '(color)\n', (4165, 4172), False, 'from matplotlib.colors import colorConverter\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is referred and derived from project NetworkX
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
import networkx.readwrite.tests.test_gml
import pytest
from networkx.readwrite.tests.test_gml import TestGraph
from graphscope import nx
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context
# Re-import networkx's GML read/write test module under the graphscope.nx
# namespace, decorating every collected test with the graphscope_session
# fixture so the tests run against a live graphscope session.
import_as_graphscope_nx(
    networkx.readwrite.tests.test_gml,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestGraph)
class TestGraph:
    """GML round-trip tests overridden for graphscope.nx.

    Only the tests whose behavior differs from upstream networkx are
    redefined here; the rest are inherited via with_graphscope_nx_context.
    """

    def test_data_types(self):
        # NB: json can't use tuple, byte as key
        data = [
            True,
            False,
            10**10,  # 10 ** 20 overflow on folly::dynamic
            -2e33,
            "'",
            '"&&&""',
            [{"\xfd": "\x7f", chr(0x4444): [1, 2]}, [2, "3"]],
        ]
        try:  # fails under IronPython
            data.append(chr(0x14444))
        except ValueError:
            data.append(chr(0x1444))
        G = nx.Graph()
        G.name = data
        G.graph["data"] = data
        print(dict(data=data))
        G.add_node(0, int=-1, data=dict(data=data))
        G.add_edge(0, 0, float=-2.5, data=data)
        gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
        G = nx.parse_gml(gml, destringizer=literal_destringizer)
        assert data == G.name
        assert {"name": data, "data": data} == G.graph
        assert list(G.nodes(data=True)) == [(0, dict(int=-1, data=dict(data=data)))]
        assert list(G.edges(data=True)) == [(0, 0, dict(float=-2.5, data=data))]
        G = nx.Graph()
        G.graph["data"] = "frozenset([1, 2, 3])"
        G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
        assert G.graph["data"] == "frozenset([1, 2, 3])"

    def test_tuplelabels(self):
        # https://github.com/networkx/networkx/pull/1048
        # Writing tuple labels to GML failed.
        # NOTE: a shadowed duplicate of this method (asserting a single fixed
        # node order) was removed; node ids may be assigned in either order,
        # so the generated GML is compared against both possibilities.
        G = nx.Graph()
        G.add_edge((0, 1), (1, 0))
        data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
        answer = (
            """graph [
  node [
    id 0
    label "(0,1)"
  ]
  node [
    id 1
    label "(1,0)"
  ]
  edge [
    source 0
    target 1
  ]
]""",
            """graph [
  node [
    id 0
    label "(1,0)"
  ]
  node [
    id 1
    label "(0,1)"
  ]
  edge [
    source 0
    target 1
  ]
]""",
        )
        assert data in answer

    @pytest.mark.skip(
        reason="the folly json serialization does not support to keep the decimal point in SHORTEST mode, keep record on issue #1167"
    )
    def test_float_label(self):
        node = 1.0
        G = nx.Graph()
        G.add_node(node)
        fobj = tempfile.NamedTemporaryFile()
        nx.write_gml(G, fobj)
        fobj.seek(0)
        # Should be bytes in 2.x and 3.x
        data = fobj.read().strip().decode("ascii")
        answer = """graph [
  node [
    id 0
    label "1"
  ]
]"""
        assert data == answer

    @pytest.mark.skip(reason="rapidjson not support inf.")
    def test_special_float_label(self):
        pass
|
[
"pytest.mark.skip",
"graphscope.nx.utils.compat.with_graphscope_nx_context",
"pytest.mark.usefixtures",
"graphscope.nx.write_gml",
"graphscope.nx.generate_gml",
"graphscope.nx.parse_gml",
"graphscope.nx.Graph"
] |
[((799, 844), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (822, 844), False, 'import pytest\n'), ((846, 883), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGraph'], {}), '(TestGraph)\n', (872, 883), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((3255, 3408), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""the folly json serialization does not support to keep the decimal point in SHORTEST mode, keep record on issue #1167"""'}), "(reason=\n 'the folly json serialization does not support to keep the decimal point in SHORTEST mode, keep record on issue #1167'\n )\n", (3271, 3408), False, 'import pytest\n'), ((3805, 3858), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""rapidjson not support inf."""'}), "(reason='rapidjson not support inf.')\n", (3821, 3858), False, 'import pytest\n'), ((747, 792), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (770, 792), False, 'import pytest\n'), ((1048, 1058), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1056, 1058), False, 'from graphscope import nx\n'), ((1837, 1847), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1845, 1847), False, 'from graphscope import nx\n'), ((2119, 2171), 'graphscope.nx.parse_gml', 'nx.parse_gml', (['gml'], {'destringizer': 'literal_destringizer'}), '(gml, destringizer=literal_destringizer)\n', (2131, 2171), False, 'from graphscope import nx\n'), ((2435, 2445), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (2443, 2445), False, 'from graphscope import nx\n'), ((2772, 2782), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (2780, 2782), False, 'from graphscope import nx\n'), ((3476, 3486), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (3484, 3486), False, 'from graphscope import nx\n'), ((3565, 3586), 'graphscope.nx.write_gml', 
'nx.write_gml', (['G', 'fobj'], {}), '(G, fobj)\n', (3577, 3586), False, 'from graphscope import nx\n'), ((1119, 1168), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {'stringizer': 'literal_stringizer'}), '(G, stringizer=literal_stringizer)\n', (1134, 1168), False, 'from graphscope import nx\n'), ((2056, 2105), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {'stringizer': 'literal_stringizer'}), '(G, stringizer=literal_stringizer)\n', (2071, 2105), False, 'from graphscope import nx\n'), ((2520, 2538), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {}), '(G)\n', (2535, 2538), False, 'from graphscope import nx\n'), ((2843, 2892), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {'stringizer': 'literal_stringizer'}), '(G, stringizer=literal_stringizer)\n', (2858, 2892), False, 'from graphscope import nx\n')]
|
import networkx.generators.tests.test_geometric
import pytest
from networkx.generators.tests.test_geometric import TestNavigableSmallWorldGraph
from graphscope.framework.errors import UnimplementedError
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context
# Re-import networkx's geometric-generator test module under the
# graphscope.nx namespace, applying the graphscope_session fixture to
# every test it defines.
import_as_graphscope_nx(
    networkx.generators.tests.test_geometric,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestNavigableSmallWorldGraph)
class TestNavigableSmallWorldGraph:
    # Overrides the upstream networkx test: graphscope.nx does not implement
    # navigable_small_world_graph, so the call is expected to raise.
    def test_navigable_small_world(self):
        # NOTE(review): ``nx`` is not imported in this module; presumably it is
        # injected by import_as_graphscope_nx/with_graphscope_nx_context —
        # confirm, otherwise this raises NameError rather than
        # UnimplementedError.
        with pytest.raises(UnimplementedError):
            G = nx.navigable_small_world_graph(5, p=1, q=0, seed=42)
|
[
"graphscope.nx.utils.compat.with_graphscope_nx_context",
"pytest.raises",
"pytest.mark.usefixtures"
] |
[((472, 517), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (495, 517), False, 'import pytest\n'), ((519, 575), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestNavigableSmallWorldGraph'], {}), '(TestNavigableSmallWorldGraph)\n', (545, 575), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((420, 465), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (443, 465), False, 'import pytest\n'), ((667, 700), 'pytest.raises', 'pytest.raises', (['UnimplementedError'], {}), '(UnimplementedError)\n', (680, 700), False, 'import pytest\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pytest
from networkx.exception import NetworkXError
import graphscope
import graphscope.nx as nx
from graphscope.client.session import g
from graphscope.client.session import get_default_session
from graphscope.framework.errors import InvalidArgumentError
from graphscope.framework.loader import Loader
from graphscope.proto import graph_def_pb2
def ldbc_sample_single_label(prefix, directed):
    """Build a graph with one vertex label ("comment") and one edge label
    ("replyOf") from the LDBC sample CSV files under *prefix*."""
    vertex_loader = Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|")
    edge_loader = Loader(
        os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"
    )
    graph = graphscope.g(directed=directed, generate_eid=False)
    graph = graph.add_vertices(vertex_loader, "comment")
    graph = graph.add_edges(edge_loader, "replyOf")
    return graph
def ldbc_sample_string_oid(prefix, directed):
    """Like :func:`ldbc_sample_single_label`, but the graph uses string
    vertex ids (``oid_type="string"``)."""
    vertex_loader = Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|")
    edge_loader = Loader(
        os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"
    )
    graph = graphscope.g(directed=directed, oid_type="string", generate_eid=False)
    graph = graph.add_vertices(vertex_loader, "comment")
    graph = graph.add_edges(edge_loader, "replyOf")
    return graph
def ldbc_sample_single_label_with_sess(sess, prefix, directed):
    """Like :func:`ldbc_sample_single_label`, but builds the graph inside the
    explicitly supplied session *sess* instead of the default one."""
    vertex_loader = Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|")
    edge_loader = Loader(
        os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"
    )
    graph = sess.g(directed=directed, generate_eid=False)
    graph = graph.add_vertices(vertex_loader, "comment")
    graph = graph.add_edges(edge_loader, "replyOf")
    return graph
def ldbc_sample_multi_labels(prefix, directed):
    """Build a graph with three vertex labels (comment, person, post) and
    three edge labels (replyOf, knows, replyOf2) from the LDBC sample CSV
    files under *prefix*."""

    def _csv(name):
        # All LDBC sample files live under *prefix* and use '|' as delimiter.
        return Loader(os.path.join(prefix, name), delimiter="|")

    graph = graphscope.g(directed=directed, generate_eid=False)
    graph = graph.add_vertices(
        _csv("comment_0_0.csv"),
        "comment",
        ["creationDate", "locationIP", "browserUsed", "content", "length"],
    )
    graph = graph.add_vertices(
        _csv("person_0_0.csv"),
        "person",
        [
            "firstName",
            "lastName",
            "gender",
            ("birthday", str),
            "creationDate",
            "locationIP",
            "browserUsed",
        ],
    )
    graph = graph.add_vertices(
        _csv("post_0_0.csv"),
        "post",
        [
            "imageFile",
            "creationDate",
            "locationIP",
            "browserUsed",
            "language",
            "content",
            "length",
        ],
    )
    graph = graph.add_edges(
        _csv("comment_replyOf_comment_0_0.csv"),
        "replyOf",
        src_label="comment",
        dst_label="comment",
    )
    graph = graph.add_edges(
        _csv("person_knows_person_0_0.csv"),
        "knows",
        ["creationDate"],
        src_label="person",
        dst_label="person",
    )
    graph = graph.add_edges(
        _csv("comment_replyOf_post_0_0.csv"),
        "replyOf2",
        src_label="comment",
        dst_label="post",
    )
    return graph
def load_p2p(prefix, directed):
    """Load the header-less, space-delimited p2p-31 edge file under *prefix*
    as a graph with a single edge label "group"."""
    edge_loader = Loader(
        os.path.join(prefix, "p2p-31.e"), header_row=False, delimiter=" "
    )
    return graphscope.load_from(
        edges={"group": {"loader": edge_loader}},
        directed=directed,
        generate_eid=False,
    )
@pytest.mark.usefixtures("graphscope_session")
class TestGraphTransformation(object):
    """Round-trip conversion tests between graphscope property graphs and
    graphscope.nx graphs (undirected variant; see TestDigraphTransformation
    for the directed counterpart)."""

    @classmethod
    def setup_class(cls):
        cls.NXGraph = nx.Graph
        cls.data_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample")
        cls.single_label_g = ldbc_sample_single_label(cls.data_dir, False)
        cls.multi_label_g = ldbc_sample_multi_labels(cls.data_dir, False)
        cls.p2p = load_p2p(os.path.expandvars("${GS_TEST_DIR}"), False)
        cls.p2p_nx = nx.read_edgelist(
            os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"),
            nodetype=int,
            data=True,
        )
        cls.str_oid_g = ldbc_sample_string_oid(cls.data_dir, False)

    @classmethod
    def teardown_class(cls):
        cls.single_label_g.unload()
        cls.multi_label_g.unload()
        cls.str_oid_g.unload()

    def assert_convert_success(self, gs_g, nx_g):
        """Assert the gs graph and nx graph agree on directedness and schema."""
        assert gs_g.is_directed() == nx_g.is_directed()
        assert self._schema_equal(gs_g.schema, nx_g.schema)

    def _schema_equal(self, gs_schema, nx_schema):
        """Compare vertex/edge property name->type maps of the two schemas.

        The gs side merges properties across all labels; the nx side has a
        single (dynamic) label, so only its first label entry is inspected.
        """
        v_props = {}
        for entry in gs_schema._valid_vertex_labels():
            for prop in entry.properties:
                v_props[prop.name] = prop.type
        e_props = {}
        for entry in gs_schema._valid_edge_labels():
            for prop in entry.properties:
                e_props[prop.name] = prop.type
        # These come from the nx schema; previously misnamed gs_v_props/gs_e_props.
        nx_v_props = {
            prop.name: prop.type
            for prop in list(nx_schema._valid_vertex_labels())[0].properties
        }
        nx_e_props = {
            prop.name: prop.type
            for prop in list(nx_schema._valid_edge_labels())[0].properties
        }
        return v_props == nx_v_props and e_props == nx_e_props

    # nx to gs
    def test_empty_nx_to_gs(self):
        empty_nx_g = self.NXGraph(dist=True)
        gs_g = g(empty_nx_g)
        self.assert_convert_success(gs_g, empty_nx_g)

    def test_only_contains_nodes_nx_to_gs(self):
        nx_g = self.NXGraph(dist=True)
        nx_g.add_nodes_from(range(100), type="node")
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)

    def test_simple_nx_to_gs(self):
        nx_g = nx.complete_graph(10, create_using=self.NXGraph)
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)

    def test_int_node_nx_to_gs(self):
        nx_g = self.NXGraph(dist=True)
        nx_g.add_nodes_from(range(10), foo="star")
        nx_g.add_edges_from(
            [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8), (8, 9)],
            weight=3.14,
        )
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)

    def test_str_node_nx_to_gs(self):
        nx_g = nx.les_miserables_graph()
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)

    def test_complete_nx_to_gs(self):
        # multi-propery, node propery and edge propty both aligned
        nodes = [
            (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}),
            (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}),
            (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}),
        ]
        edges = [
            (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}),
            (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}),
            (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}),
        ]
        nx_g = self.NXGraph(dist=True)
        nx_g.update(edges, nodes)
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)
        # node property aliged, edge not aliged
        nx_g2 = nx_g.copy()
        nx_g2.add_edge(0, 1, ep4="new propery")
        gs_g2 = g(nx_g2)
        self.assert_convert_success(gs_g2, nx_g2)
        # edge property aliged, node not aliged
        nx_g3 = nx_g.copy()
        nx_g3.add_node(2, vp4="new propery")
        gs_g3 = g(nx_g3)
        self.assert_convert_success(gs_g3, nx_g3)
        # both not aliged
        nx_g4 = nx_g.copy()
        nx_g4.add_edge(0, 1, ep4="new propery")
        nx_g4.add_node(2, vp4="new propery")
        gs_g4 = g(nx_g4)
        self.assert_convert_success(gs_g4, nx_g4)

    def test_nx_to_gs_after_modify(self):
        nx_g = self.NXGraph(dist=True)
        nodes = [
            (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}),
            (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}),
            (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}),
        ]
        # add nodes
        nx_g.add_nodes_from(nodes)
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)
        # add_edges
        edges = [
            (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}),
            (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}),
            (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}),
        ]
        nx_g.add_edges_from(edges)
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)
        # remove edge
        nx_g.remove_edge(0, 1)
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)
        # remove node
        nx_g.remove_node(0)
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)
        # clear
        nx_g.clear()
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)

    def test_nx_to_gs_remove_nodes(self):
        nx_g = self.NXGraph(dist=True)
        nx_g.add_nodes_from(range(10))  # all nodes are int
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)  # success
        nx_g.add_node("str_node")  # add a str node
        with pytest.raises(
            RuntimeError,
            match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph",
        ):
            gs_g = g(nx_g)  # mixing oid type, failed
        nx_g.remove_node("str_node")  # remove str node, all nodes are int again
        gs_g = g(nx_g)
        self.assert_convert_success(gs_g, nx_g)  # success

    def test_error_on_view_to_gs(self):
        nx_g = self.NXGraph(dist=True)
        nx_g._graph = None  # graph view always has a _graph attribute
        nx_g._is_client_view = False
        with pytest.raises(TypeError, match="graph view can not convert to gs graph"):
            gs_g = g(nx_g)

    def test_error_on_mixing_node_nx_to_gs(self):
        nx_g = self.NXGraph(dist=True)
        nx_g.add_node(0, weight=1.23)
        nx_g.add_node("zakky", foo="node")
        with pytest.raises(
            RuntimeError,
            match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph",
        ):
            gs_g = g(nx_g)

    # gs to nx
    def test_empty_gs_to_nx(self):
        empty_nx = self.NXGraph(dist=True)
        empty_gs_graph = g(empty_nx)
        G = self.NXGraph(empty_gs_graph)
        self.assert_convert_success(empty_gs_graph, G)

    def test_single_label_gs_to_nx(self):
        G = self.NXGraph(self.single_label_g)
        assert G.number_of_nodes() == 76830
        assert G.number_of_edges() == 38786
        assert 618475290625 not in G
        assert ("comment", 618475290625) in G
        G2 = self.NXGraph(self.single_label_g, default_label="comment")
        assert G2.number_of_nodes() == 76830
        assert G2.number_of_edges() == 38786
        assert 618475290625 in G2
        assert ("comment", 618475290625) not in G2

    def test_multi_label_gs_to_nx(self):
        G = self.NXGraph(self.multi_label_g)
        assert G.number_of_nodes() == (76830 + 903 + 78976)
        assert G.number_of_edges() == (38786 + 6626 + 38044)
        assert 618475290625 not in G  # comment node is (label, id) format
        assert ("comment", 618475290625) in G
        assert 933 not in G  # person node is (label, id) format
        assert ("person", 933) in G
        assert 618475290624 not in G  # post node is (label, id) format
        assert ("post", 618475290624) in G
        G2 = self.NXGraph(self.multi_label_g, default_label="comment")
        assert G2.number_of_nodes() == (76830 + 903 + 78976)
        assert G2.number_of_edges() == (38786 + 6626 + 38044)
        assert 618475290625 in G2  # comment node is default label node
        assert ("comment", 618475290625) not in G2
        assert 933 not in G2  # person node is (label, id) format
        assert ("person", 933) in G2
        assert 618475290624 not in G2  # post node is (label, id) format
        # BUGFIX: previously asserted against G (already verified above); the
        # G2 graph is the one under test in this paragraph.
        assert ("post", 618475290624) in G2

    @pytest.mark.skipif(
        os.environ.get("DEPLOYMENT", None) == "standalone",
        reason="FIXME(weibin): ci runner failed",
    )
    def test_report_methods_on_copy_on_write_strategy(self):
        G = self.NXGraph(self.multi_label_g, default_label="person")
        assert G.graph_type == graph_def_pb2.ARROW_PROPERTY
        # test NODE_NUM and EDGE_NUM
        assert G.number_of_nodes() == (76830 + 903 + 78976)
        assert G.number_of_edges() == (38786 + 6626 + 38044)
        # test HAS_NODE and HAS_EDGE
        assert 0 not in G
        assert 933 in G
        assert ("person", 933) not in G  # deault node must be non-tuple format
        assert ("random", 933) not in G
        assert G.has_edge(933, 4398046511628)
        assert G.has_edge(("comment", 618475290625), ("post", 618475290624))
        assert not G.has_edge(933, ("post", 618475290624))
        # test GET_NODE_DATA and GET_EDGE_DATA
        assert G.get_node_data(933) == {
            "browserUsed": "Firefox",
            "locationIP": "172.16.58.3",
            "creationDate": "2010-02-14T15:32:10.447+0000",
            "birthday": "1989-12-03",
            "gender": "male",
            "lastName": "Perera",
            "firstName": "Mahinda",
        }
        assert G.get_edge_data(933, 4398046511628) == {
            "creationDate": "2010-07-30T15:19:53.298+0000",
        }
        assert sorted(list(G.neighbors(933))) == [
            4398046511628,
            8796093023017,
            28587302322537,
        ]
        if G.is_directed():
            assert sorted(list(G.predecessors(4398046511628))) == [
                318,
                933,
                987,
                2199023256530,
            ]
        G.add_node(0)  # modify graph to make copy on write
        assert G.graph_type == graph_def_pb2.DYNAMIC_PROPERTY
        assert G.number_of_nodes() == (76831 + 903 + 78976)
        assert G.number_of_edges() == (38786 + 6626 + 38044)
        # test HAS_NODE and HAS_EDGE
        assert 0 in G
        assert 933 in G
        assert ("person", 933) not in G
        assert ("random", 933) not in G
        assert G.has_edge(933, 4398046511628)
        assert G.has_edge(("comment", 618475290625), ("post", 618475290624))
        assert not G.has_edge(618475290625, ("post", 618475290624))
        # test GET_NODE_DATA and GET_EDGE_DATA
        assert G.get_node_data(933) == {
            "browserUsed": "Firefox",
            "locationIP": "172.16.58.3",
            "creationDate": "2010-02-14T15:32:10.447+0000",
            "birthday": "1989-12-03",
            "gender": "male",
            "lastName": "Perera",
            "firstName": "Mahinda",
        }
        assert G.get_edge_data(933, 4398046511628) == {
            "creationDate": "2010-07-30T15:19:53.298+0000",
        }
        assert sorted(list(G.neighbors(933))) == [
            4398046511628,
            8796093023017,
            28587302322537,
        ]
        if G.is_directed():
            assert sorted(list(G.predecessors(4398046511628))) == [
                318,
                933,
                987,
                2199023256530,
            ]

    def test_str_oid_gs_to_nx(self):
        g = self.str_oid_g
        nx_g = self.NXGraph(g, default_label="comment")
        assert "618475290625" in nx_g
        self.assert_convert_success(g, nx_g)

    @pytest.mark.skip(reason="TODO: open after supporting run app on arrow_property")
    def test_gs_to_nx_with_sssp(self):
        nx_g = self.NXGraph(self.p2p)
        ret = nx.builtin.single_source_dijkstra_path_length(nx_g, 6, weight="f2")
        ret2 = nx.builtin.single_source_dijkstra_path_length(
            self.p2p_nx, 6, weight="weight"
        )
        assert ret == ret2

    def test_error_on_wrong_nx_type(self):
        g = self.single_label_g
        with pytest.raises(NetworkXError):
            nx_g = nx.DiGraph(g)

    @pytest.mark.skip(reason="FIXME: multiple session crash in ci.")
    def test_multiple_sessions(self):
        sess2 = graphscope.session(cluster_type="hosts", num_workers=1)
        nx2 = sess2.nx()
        gs_g = self.single_label_g
        if self.NXGraph is nx.Graph:
            gs_g2 = ldbc_sample_single_label_with_sess(sess2, self.data_dir, False)
        else:
            gs_g2 = ldbc_sample_single_label_with_sess(sess2, self.data_dir, True)
        assert gs_g.session_id != gs_g2.session_id
        nx_g = self.NXGraph(gs_g, dist=True)
        if nx_g.is_directed():
            nx_g2 = nx2.DiGraph(gs_g2, dist=True)
        else:
            nx_g2 = nx2.Graph(gs_g2, dist=True)
        self.assert_convert_success(gs_g2, nx_g2)
        assert nx_g.session_id == gs_g.session_id
        assert nx_g2.session_id == gs_g2.session_id
        # copies
        cg1 = nx_g2.copy()
        assert cg1.session_id == nx_g2.session_id
        dg1 = nx_g2.to_directed()
        assert dg1.session_id == nx_g2.session_id
        dg2 = nx_g2.to_directed(as_view=True)
        assert dg2.session_id == nx_g2.session_id
        # subgraph
        sg1 = nx_g2.subgraph([274877907301, 274877907299])
        assert sg1.session_id == nx_g2.session_id
        sg2 = nx_g2.edge_subgraph([(274877907301, 274877907299)])
        assert sg2.session_id == nx_g2.session_id
        # error raise if gs graph and nx graph not in the same session.
        with pytest.raises(
            RuntimeError,
            match="graphscope graph and networkx graph not in the same session.",
        ):
            tmp = self.NXGraph(gs_g2)
        with pytest.raises(
            RuntimeError,
            match="networkx graph and graphscope graph not in the same session.",
        ):
            tmp = g(nx_g2)
            print(tmp.session_id, nx_g2.session_id)
        sess2.close()
@pytest.mark.usefixtures("graphscope_session")
class TestGraphProjectTest(object):
    """Tests for projecting an nx graph to a 'simple' graph via
    _project_to_simple, checking the resulting vdata/edata schema types."""
    @classmethod
    def setup_class(cls):
        cls.NXGraph = nx.Graph
        edgelist = os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist")
        cls.g = nx.read_edgelist(
            edgelist, nodetype=int, data=True, create_using=cls.NXGraph
        )
        # NOTE(review): both properties land on node 1 (the second add_node
        # merges attributes); the DiGraph sibling class uses nodes 0 and 1.
        # Confirm whether this asymmetry is intentional.
        cls.g.add_node(1, vdata_str="kdjfao")
        cls.g.add_node(1, vdata_int=123)
    def test_project_to_simple(self):
        # default, e_prop='', v_prop=''
        sg1 = self.g._project_to_simple()
        assert (
            sg1.schema.vdata_type == graph_def_pb2.NULLVALUE
            and sg1.schema.edata_type == graph_def_pb2.NULLVALUE
        )
        # to_simple with e_prop
        sg2 = self.g._project_to_simple(e_prop="edata_float")
        assert (
            sg2.schema.vdata_type == graph_def_pb2.NULLVALUE
            and sg2.schema.edata_type == graph_def_pb2.DOUBLE
        )
        # to_simple with v_prop
        sg3 = self.g._project_to_simple(v_prop="vdata_str")
        assert (
            sg3.schema.vdata_type == graph_def_pb2.STRING
            and sg3.schema.edata_type == graph_def_pb2.NULLVALUE
        )
        # to_simple with e_prop and v_prop
        sg4 = self.g._project_to_simple(v_prop="vdata_int", e_prop="edata_str")
        assert (
            sg4.schema.vdata_type == graph_def_pb2.LONG
            and sg4.schema.edata_type == graph_def_pb2.STRING
        )
        # empty graph to simple
        empty_g = self.NXGraph()
        sg5 = empty_g._project_to_simple()
        assert (
            sg5.schema.vdata_type == graph_def_pb2.NULLVALUE
            and sg5.schema.edata_type == graph_def_pb2.NULLVALUE
        )
        # projecting on a property absent from the graph must raise
        with pytest.raises(
            InvalidArgumentError, match="graph not contains the vertex property foo"
        ):
            sg6 = empty_g._project_to_simple(v_prop="foo")
    @pytest.mark.skip(reason="It use much memory, exceeds the limit of Github runner")
    def test_implicit_project_to_simple(self):
        # Builtin apps should trigger the projection implicitly.
        g = self.g
        nx.builtin.degree_centrality(g)
        nx.builtin.single_source_dijkstra_path_length(g, source=6, weight="weight")
    def test_error_on_not_exist_vertex_property(self):
        g = self.NXGraph()
        g.add_node(0, foo="node")
        with pytest.raises(
            InvalidArgumentError, match="graph not contains the vertex property weight"
        ):
            sg = g._project_to_simple(v_prop="weight")
    def test_error_on_not_exist_edge_property(self):
        g = self.NXGraph()
        g.add_edge(0, 1, weight=3)
        with pytest.raises(
            InvalidArgumentError, match="graph not contains the edge property type"
        ):
            sg = g._project_to_simple(e_prop="type")
    @pytest.mark.skip(reason="FIXME: engine can not catch the app throw error now")
    def test_error_on_some_edges_not_contain_property(self):
        g = self.g
        # some edges not contain the property
        with pytest.raises(RuntimeError):
            nx.builtin.single_source_dijkstra_path_length(
                g, source=6, weight="edata_random_int_0"
            )
    @pytest.mark.skip(reason="FIXME: engine can not catch the app throw error now")
    def test_error_on_some_edges_has_wrong_type(self):
        g = self.g.copy()
        # set edge a wrong type
        g[6][42]["weight"] = "a str"
        with pytest.raises(RuntimeError):
            nx.builtin.single_source_dijkstra_path_length(g, source=6, weight="weight")
    @pytest.mark.skip(reason="find a algorithm that use vertex data")
    def test_error_on_some_nodes_not_contain_property(self):
        g = self.g
        with pytest.raises(RuntimeError):
            nx.builtin.sssp(weight="vdata_random_int_0")
    @pytest.mark.skip(reason="find a algorithm that use vertex data")
    def test_error_on_some_nodes_has_wrong_type(self):
        g = self.g.copy()
        g[0]["weight"] = "a str"
        with pytest.raises(RuntimeError):
            nx.builtin.sssp(weight="weight")
@pytest.mark.usefixtures("graphscope_session")
class TestDigraphTransformation(TestGraphTransformation):
    """Re-run the transformation suite with directed graphs (nx.DiGraph)."""

    @classmethod
    def setup_class(cls):
        cls.NXGraph = nx.DiGraph
        sample_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample")
        cls.single_label_g = ldbc_sample_single_label(sample_dir, True)
        cls.multi_label_g = ldbc_sample_multi_labels(sample_dir, True)
        cls.p2p = load_p2p(os.path.expandvars("${GS_TEST_DIR}"), True)
        cls.p2p_nx = nx.read_edgelist(
            os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"),
            nodetype=int,
            data=True,
            create_using=nx.DiGraph,
        )
        cls.str_oid_g = ldbc_sample_string_oid(sample_dir, True)

    @classmethod
    def teardown_class(cls):
        for loaded in (cls.single_label_g, cls.multi_label_g, cls.str_oid_g):
            loaded.unload()

    def test_error_on_wrong_nx_type(self):
        # A directed gs graph cannot be wrapped by an undirected nx.Graph.
        with pytest.raises(NetworkXError):
            nx_g = nx.Graph(self.single_label_g)
@pytest.mark.usefixtures("graphscope_session")
class TestDiGraphProjectTest(TestGraphProjectTest):
    """Re-run the projection suite on a directed p2p graph."""

    @classmethod
    def setup_class(cls):
        cls.NXGraph = nx.DiGraph
        edgelist_path = os.path.expandvars(
            "${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"
        )
        cls.g = nx.read_edgelist(
            edgelist_path, nodetype=int, data=True, create_using=cls.NXGraph
        )
        cls.g.add_node(0, vdata_str="kdjfao")
        cls.g.add_node(1, vdata_int=123)
@pytest.mark.usefixtures("graphscope_session")
class TestImportNetworkxModuleWithSession(object):
    """Verify nx modules obtained from a session bind graphs to that session,
    and that lazy-mode sessions refuse to hand out an nx module."""

    @classmethod
    def setup_class(cls):
        cls.session1 = graphscope.session(cluster_type="hosts", num_workers=1)
        cls.session2 = graphscope.session(cluster_type="hosts", num_workers=1)
        cls.session_lazy = graphscope.session(
            cluster_type="hosts", num_workers=1, mode="lazy"
        )

    def test_import(self):
        import graphscope.nx as nx_default

        nx_of_s1 = self.session1.nx()
        nx_of_s2 = self.session2.nx()
        g_default = nx_default.Graph()
        g_s1 = nx_of_s1.Graph()
        g_s2 = nx_of_s2.Graph()
        # Each graph must report the session whose nx module created it.
        assert g_default.session_id == get_default_session().session_id
        assert g_s1.session_id == self.session1.session_id
        assert g_s2.session_id == self.session2.session_id
        self.session1.close()
        self.session2.close()

    def test_error_import_with_wrong_session(self):
        # nx() is only available on eager-mode sessions.
        with pytest.raises(
            RuntimeError,
            match="Networkx module need the session to be eager mode. Current session is lazy mode.",
        ):
            nx = self.session_lazy.nx()
        self.session_lazy.close()
|
[
"graphscope.nx.complete_graph",
"graphscope.nx.builtin.single_source_dijkstra_path_length",
"graphscope.nx.read_edgelist",
"graphscope.client.session.get_default_session",
"graphscope.session",
"graphscope.client.session.g.add_edge",
"pytest.mark.usefixtures",
"graphscope.client.session.g._project_to_simple",
"graphscope.nx.les_miserables_graph",
"graphscope.g",
"graphscope.client.session.g",
"pytest.mark.skip",
"graphscope.client.session.g.add_node",
"graphscope.nx.DiGraph",
"pytest.raises",
"graphscope.nx.builtin.degree_centrality",
"os.path.expandvars",
"os.path.join",
"os.environ.get",
"graphscope.nx.builtin.sssp",
"graphscope.nx.Graph"
] |
[((4390, 4435), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (4413, 4435), False, 'import pytest\n'), ((18519, 18564), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (18542, 18564), False, 'import pytest\n'), ((22584, 22629), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (22607, 22629), False, 'import pytest\n'), ((23624, 23669), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (23647, 23669), False, 'import pytest\n'), ((24092, 24137), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (24115, 24137), False, 'import pytest\n'), ((1094, 1145), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (1106, 1145), False, 'import graphscope\n'), ((1485, 1555), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'oid_type': '"""string"""', 'generate_eid': '(False)'}), "(directed=directed, oid_type='string', generate_eid=False)\n", (1497, 1555), False, 'import graphscope\n'), ((2300, 2351), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (2312, 2351), False, 'import graphscope\n'), ((16103, 16188), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""TODO: open after supporting run app on arrow_property"""'}), "(reason='TODO: open after supporting run app on arrow_property'\n )\n", (16119, 16188), False, 'import pytest\n'), ((16644, 16707), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: multiple session crash in ci."""'}), "(reason='FIXME: multiple session crash in ci.')\n", (16660, 16707), False, 'import pytest\n'), 
((20448, 20534), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""It use much memory, exceeds the limit of Github runner"""'}), "(reason=\n 'It use much memory, exceeds the limit of Github runner')\n", (20464, 20534), False, 'import pytest\n'), ((21317, 21395), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: engine can not catch the app throw error now"""'}), "(reason='FIXME: engine can not catch the app throw error now')\n", (21333, 21395), False, 'import pytest\n'), ((21700, 21778), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: engine can not catch the app throw error now"""'}), "(reason='FIXME: engine can not catch the app throw error now')\n", (21716, 21778), False, 'import pytest\n'), ((22065, 22129), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""find a algorithm that use vertex data"""'}), "(reason='find a algorithm that use vertex data')\n", (22081, 22129), False, 'import pytest\n'), ((22315, 22379), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""find a algorithm that use vertex data"""'}), "(reason='find a algorithm that use vertex data')\n", (22331, 22379), False, 'import pytest\n'), ((4573, 4621), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/ldbc_sample"""'], {}), "('${GS_TEST_DIR}/ldbc_sample')\n", (4591, 4621), False, 'import os\n'), ((6245, 6258), 'graphscope.client.session.g', 'g', (['empty_nx_g'], {}), '(empty_nx_g)\n', (6246, 6258), False, 'from graphscope.client.session import g\n'), ((6470, 6477), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6471, 6477), False, 'from graphscope.client.session import g\n'), ((6578, 6626), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(10)'], {'create_using': 'self.NXGraph'}), '(10, create_using=self.NXGraph)\n', (6595, 6626), True, 'import graphscope.nx as nx\n'), ((6642, 6649), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6643, 6649), False, 'from 
graphscope.client.session import g\n'), ((6992, 6999), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6993, 6999), False, 'from graphscope.client.session import g\n'), ((7102, 7127), 'graphscope.nx.les_miserables_graph', 'nx.les_miserables_graph', ([], {}), '()\n', (7125, 7127), True, 'import graphscope.nx as nx\n'), ((7143, 7150), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (7144, 7150), False, 'from graphscope.client.session import g\n'), ((7782, 7789), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (7783, 7789), False, 'from graphscope.client.session import g\n'), ((7979, 7987), 'graphscope.client.session.g', 'g', (['nx_g2'], {}), '(nx_g2)\n', (7980, 7987), False, 'from graphscope.client.session import g\n'), ((8176, 8184), 'graphscope.client.session.g', 'g', (['nx_g3'], {}), '(nx_g3)\n', (8177, 8184), False, 'from graphscope.client.session import g\n'), ((8399, 8407), 'graphscope.client.session.g', 'g', (['nx_g4'], {}), '(nx_g4)\n', (8400, 8407), False, 'from graphscope.client.session import g\n'), ((8800, 8807), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8801, 8807), False, 'from graphscope.client.session import g\n'), ((9126, 9133), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9127, 9133), False, 'from graphscope.client.session import g\n'), ((9251, 9258), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9252, 9258), False, 'from graphscope.client.session import g\n'), ((9373, 9380), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9374, 9380), False, 'from graphscope.client.session import g\n'), ((9482, 9489), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9483, 9489), False, 'from graphscope.client.session import g\n'), ((9695, 9702), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9696, 9702), False, 'from graphscope.client.session import g\n'), ((10152, 10159), 'graphscope.client.session.g', 
'g', (['nx_g'], {}), '(nx_g)\n', (10153, 10159), False, 'from graphscope.client.session import g\n'), ((11024, 11035), 'graphscope.client.session.g', 'g', (['empty_nx'], {}), '(empty_nx)\n', (11025, 11035), False, 'from graphscope.client.session import g\n'), ((16275, 16342), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['nx_g', '(6)'], {'weight': '"""f2"""'}), "(nx_g, 6, weight='f2')\n", (16320, 16342), True, 'import graphscope.nx as nx\n'), ((16358, 16436), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.p2p_nx', '(6)'], {'weight': '"""weight"""'}), "(self.p2p_nx, 6, weight='weight')\n", (16403, 16436), True, 'import graphscope.nx as nx\n'), ((16762, 16817), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (16780, 16817), False, 'import graphscope\n'), ((18694, 18762), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (18712, 18762), False, 'import os\n'), ((18779, 18856), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['edgelist'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'cls.NXGraph'}), '(edgelist, nodetype=int, data=True, create_using=cls.NXGraph)\n', (18795, 18856), True, 'import graphscope.nx as nx\n'), ((20604, 20635), 'graphscope.nx.builtin.degree_centrality', 'nx.builtin.degree_centrality', (['g'], {}), '(g)\n', (20632, 20635), True, 'import graphscope.nx as nx\n'), ((20644, 20719), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""weight"""'}), "(g, source=6, weight='weight')\n", (20689, 20719), True, 'import graphscope.nx as nx\n'), ((20811, 20836), 'graphscope.client.session.g.add_node', 
'g.add_node', (['(0)'], {'foo': '"""node"""'}), "(0, foo='node')\n", (20821, 20836), False, 'from graphscope.client.session import g\n'), ((21108, 21134), 'graphscope.client.session.g.add_edge', 'g.add_edge', (['(0)', '(1)'], {'weight': '(3)'}), '(0, 1, weight=3)\n', (21118, 21134), False, 'from graphscope.client.session import g\n'), ((22783, 22831), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/ldbc_sample"""'], {}), "('${GS_TEST_DIR}/ldbc_sample')\n", (22801, 22831), False, 'import os\n'), ((23817, 23885), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (23835, 23885), False, 'import os\n'), ((23902, 23979), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['edgelist'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'cls.NXGraph'}), '(edgelist, nodetype=int, data=True, create_using=cls.NXGraph)\n', (23918, 23979), True, 'import graphscope.nx as nx\n'), ((24255, 24310), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (24273, 24310), False, 'import graphscope\n'), ((24334, 24389), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (24352, 24389), False, 'import graphscope\n'), ((24417, 24485), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)', 'mode': '"""lazy"""'}), "(cluster_type='hosts', num_workers=1, mode='lazy')\n", (24435, 24485), False, 'import graphscope\n'), ((24658, 24676), 'graphscope.nx.Graph', 'nx_default.Graph', ([], {}), '()\n', (24674, 24676), True, 'import graphscope.nx as nx_default\n'), ((1193, 1232), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (1205, 1232), False, 'import os\n'), ((1310, 1365), 
'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (1322, 1365), False, 'import os\n'), ((1603, 1642), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (1615, 1642), False, 'import os\n'), ((1720, 1775), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (1732, 1775), False, 'import os\n'), ((2006, 2045), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (2018, 2045), False, 'import os\n'), ((2123, 2178), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (2135, 2178), False, 'import os\n'), ((2995, 3031), 'os.path.join', 'os.path.join', (['prefix', '"""post_0_0.csv"""'], {}), "(prefix, 'post_0_0.csv')\n", (3007, 3031), False, 'import os\n'), ((3849, 3901), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_post_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_post_0_0.csv')\n", (3861, 3901), False, 'import os\n'), ((4798, 4834), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (4816, 4834), False, 'import os\n'), ((4894, 4962), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (4912, 4962), False, 'import os\n'), ((9828, 9974), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"""'}), '(RuntimeError, match=\n "The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"\n )\n', (9841, 9974), False, 'import pytest\n'), ((10020, 10027), 
'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10021, 10027), False, 'from graphscope.client.session import g\n'), ((10420, 10492), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""graph view can not convert to gs graph"""'}), "(TypeError, match='graph view can not convert to gs graph')\n", (10433, 10492), False, 'import pytest\n'), ((10513, 10520), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10514, 10520), False, 'from graphscope.client.session import g\n'), ((10705, 10851), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"""'}), '(RuntimeError, match=\n "The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"\n )\n', (10718, 10851), False, 'import pytest\n'), ((10897, 10904), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10898, 10904), False, 'from graphscope.client.session import g\n'), ((12754, 12788), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (12768, 12788), False, 'import os\n'), ((16575, 16603), 'pytest.raises', 'pytest.raises', (['NetworkXError'], {}), '(NetworkXError)\n', (16588, 16603), False, 'import pytest\n'), ((16624, 16637), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['g'], {}), '(g)\n', (16634, 16637), True, 'import graphscope.nx as nx\n'), ((18095, 18197), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""graphscope graph and networkx graph not in the same session."""'}), "(RuntimeError, match=\n 'graphscope graph and networkx graph not in the same session.')\n", (18108, 18197), False, 'import pytest\n'), ((18280, 18382), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""networkx graph and graphscope graph not in the same session."""'}), "(RuntimeError, match=\n 'networkx graph and graphscope graph not in the same 
session.')\n", (18293, 18382), False, 'import pytest\n'), ((18432, 18440), 'graphscope.client.session.g', 'g', (['nx_g2'], {}), '(nx_g2)\n', (18433, 18440), False, 'from graphscope.client.session import g\n'), ((20272, 20364), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the vertex property foo"""'}), "(InvalidArgumentError, match=\n 'graph not contains the vertex property foo')\n", (20285, 20364), False, 'import pytest\n'), ((20850, 20945), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the vertex property weight"""'}), "(InvalidArgumentError, match=\n 'graph not contains the vertex property weight')\n", (20863, 20945), False, 'import pytest\n'), ((20981, 21018), 'graphscope.client.session.g._project_to_simple', 'g._project_to_simple', ([], {'v_prop': '"""weight"""'}), "(v_prop='weight')\n", (21001, 21018), False, 'from graphscope.client.session import g\n'), ((21148, 21239), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the edge property type"""'}), "(InvalidArgumentError, match=\n 'graph not contains the edge property type')\n", (21161, 21239), False, 'import pytest\n'), ((21275, 21310), 'graphscope.client.session.g._project_to_simple', 'g._project_to_simple', ([], {'e_prop': '"""type"""'}), "(e_prop='type')\n", (21295, 21310), False, 'from graphscope.client.session import g\n'), ((21535, 21562), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (21548, 21562), False, 'import pytest\n'), ((21576, 21668), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""edata_random_int_0"""'}), "(g, source=6, weight=\n 'edata_random_int_0')\n", (21621, 21668), True, 'import graphscope.nx as nx\n'), ((21942, 21969), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (21955, 21969), 
False, 'import pytest\n'), ((21983, 22058), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""weight"""'}), "(g, source=6, weight='weight')\n", (22028, 22058), True, 'import graphscope.nx as nx\n'), ((22223, 22250), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (22236, 22250), False, 'import pytest\n'), ((22264, 22308), 'graphscope.nx.builtin.sssp', 'nx.builtin.sssp', ([], {'weight': '"""vdata_random_int_0"""'}), "(weight='vdata_random_int_0')\n", (22279, 22308), True, 'import graphscope.nx as nx\n'), ((22507, 22534), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (22520, 22534), False, 'import pytest\n'), ((22548, 22580), 'graphscope.nx.builtin.sssp', 'nx.builtin.sssp', ([], {'weight': '"""weight"""'}), "(weight='weight')\n", (22563, 22580), True, 'import graphscope.nx as nx\n'), ((22998, 23034), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (23016, 23034), False, 'import os\n'), ((23093, 23161), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (23111, 23161), False, 'import os\n'), ((23560, 23588), 'pytest.raises', 'pytest.raises', (['NetworkXError'], {}), '(NetworkXError)\n', (23573, 23588), False, 'import pytest\n'), ((23609, 23620), 'graphscope.nx.Graph', 'nx.Graph', (['g'], {}), '(g)\n', (23617, 23620), True, 'import graphscope.nx as nx\n'), ((25032, 25159), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""Networkx module need the session to be eager mode. Current session is lazy mode."""'}), "(RuntimeError, match=\n 'Networkx module need the session to be eager mode. 
Current session is lazy mode.'\n )\n", (25045, 25159), False, 'import pytest\n'), ((24758, 24779), 'graphscope.client.session.get_default_session', 'get_default_session', ([], {}), '()\n', (24777, 24779), False, 'from graphscope.client.session import get_default_session\n'), ((2625, 2663), 'os.path.join', 'os.path.join', (['prefix', '"""person_0_0.csv"""'], {}), "(prefix, 'person_0_0.csv')\n", (2637, 2663), False, 'import os\n'), ((3616, 3667), 'os.path.join', 'os.path.join', (['prefix', '"""person_knows_person_0_0.csv"""'], {}), "(prefix, 'person_knows_person_0_0.csv')\n", (3628, 3667), False, 'import os\n'), ((4200, 4232), 'os.path.join', 'os.path.join', (['prefix', '"""p2p-31.e"""'], {}), "(prefix, 'p2p-31.e')\n", (4212, 4232), False, 'import os\n'), ((2413, 2452), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (2425, 2452), False, 'import os\n'), ((3392, 3447), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (3404, 3447), False, 'import os\n')]
|
End of preview. Expand in Data Studio.
README.md exists but content is empty.
- Downloads last month
- 4