Dataset columns: code — string (length 239 to 50.1k); apis — list; extract_api — string (length 246 to 34.7k).
#! /usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy import queue from enum import Enum from graphscope.proto import op_def_pb2 from graphscope.proto import types_pb2 class GSEngine(Enum): analytical_engine = 0 interactive_engine = 1 learning_engine = 2 class DAGManager(object): _analytical_engine_split_op = [ types_pb2.BIND_APP, # need loaded graph to compile types_pb2.ADD_LABELS, # need loaded graph types_pb2.RUN_APP, # need loaded app types_pb2.CONTEXT_TO_NUMPY, # need loaded graph to transform selector types_pb2.CONTEXT_TO_DATAFRAME, # need loaded graph to transform selector types_pb2.GRAPH_TO_NUMPY, # need loaded graph to transform selector types_pb2.GRAPH_TO_DATAFRAME, # need loaded graph to transform selector types_pb2.TO_VINEYARD_TENSOR, # need loaded graph to transform selector types_pb2.TO_VINEYARD_DATAFRAME, # need loaded graph to transform selector types_pb2.PROJECT_GRAPH, # need loaded graph to transform selector types_pb2.PROJECT_TO_SIMPLE, # need loaded graph schema information types_pb2.ADD_COLUMN, # need ctx result types_pb2.UNLOAD_GRAPH, # need loaded graph information types_pb2.UNLOAD_APP, # need loaded app information ] _interactive_engine_split_op = [ types_pb2.CREATE_INTERACTIVE_QUERY, types_pb2.SUBGRAPH, types_pb2.GREMLIN_QUERY, types_pb2.FETCH_GREMLIN_RESULT, types_pb2.CLOSE_INTERACTIVE_QUERY, ] _learning_engine_split_op = [ types_pb2.CREATE_LEARNING_INSTANCE, types_pb2.CLOSE_LEARNING_INSTANCE, ] def __init__(self, dag_def: op_def_pb2.DagDef): self._dag_def = dag_def self._split_dag_def_queue = queue.Queue() # split dag split_dag_def = op_def_pb2.DagDef() split_dag_def_for = GSEngine.analytical_engine for op in self._dag_def.op: if op.op in self._analytical_engine_split_op: if split_dag_def.op: self._split_dag_def_queue.put((split_dag_def_for, split_dag_def)) split_dag_def = op_def_pb2.DagDef() split_dag_def_for = GSEngine.analytical_engine if op.op in self._interactive_engine_split_op: if split_dag_def.op: self._split_dag_def_queue.put((split_dag_def_for, split_dag_def)) split_dag_def = op_def_pb2.DagDef() split_dag_def_for = GSEngine.interactive_engine if op.op in self._learning_engine_split_op: if split_dag_def.op: self._split_dag_def_queue.put((split_dag_def_for, split_dag_def)) split_dag_def = op_def_pb2.DagDef() split_dag_def_for = GSEngine.learning_engine split_dag_def.op.extend([copy.deepcopy(op)]) if len(split_dag_def.op) > 0: self._split_dag_def_queue.put((split_dag_def_for, split_dag_def)) def empty(self): return self._split_dag_def_queue.empty() def get_next_dag(self): if not self._split_dag_def_queue.empty(): return self._split_dag_def_queue.get() return None
[ "queue.Queue", "graphscope.proto.op_def_pb2.DagDef", "copy.deepcopy" ]
[((2396, 2409), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (2407, 2409), False, 'import queue\n'), ((2455, 2474), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (2472, 2474), False, 'from graphscope.proto import op_def_pb2\n'), ((2779, 2798), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (2796, 2798), False, 'from graphscope.proto import op_def_pb2\n'), ((3076, 3095), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (3093, 3095), False, 'from graphscope.proto import op_def_pb2\n'), ((3371, 3390), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (3388, 3390), False, 'from graphscope.proto import op_def_pb2\n'), ((3489, 3506), 'copy.deepcopy', 'copy.deepcopy', (['op'], {}), '(op)\n', (3502, 3506), False, 'import copy\n')]
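As a usage illustration of the DAGManager defined above — a sketch only, assuming an op_def_pb2.DagDef has already been assembled by the client elsewhere — the per-engine queue it builds is typically drained like this:

# Sketch: drain the per-engine sub-DAGs produced by DAGManager.
# `dag_def` is a placeholder; in practice it is populated with ops before splitting.
from graphscope.proto import op_def_pb2

dag_def = op_def_pb2.DagDef()
manager = DAGManager(dag_def)
while not manager.empty():
    engine, sub_dag = manager.get_next_dag()
    if engine == GSEngine.analytical_engine:
        pass      # submit sub_dag to the analytical engine
    elif engine == GSEngine.interactive_engine:
        pass      # submit sub_dag to the interactive engine
    else:
        pass      # submit sub_dag to the learning engine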
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os from graphscope.framework.graph import Graph from graphscope.framework.loader import Loader def load_modern_graph(sess, prefix, directed=True): """Load modern graph. Modern graph consist 6 vertices and 6 edges, useful to test the basic functionalities. Args: sess (:class:`graphscope.Session`): Load graph within the session. prefix (str): Data directory. directed (bool, optional): Determine to load a directed or undirected graph. Defaults to True. Returns: :class:`graphscope.Graph`: A Graph object which graph type is ArrowProperty """ graph = Graph(sess, directed=directed) graph = ( graph.add_vertices( Loader(os.path.join(prefix, "person.csv"), delimiter="|"), "person", ["name", ("age", "int")], "id", ) .add_vertices( Loader(os.path.join(prefix, "software.csv"), delimiter="|"), "software", ["name", "lang"], "id", ) .add_edges( Loader(os.path.join(prefix, "knows.csv"), delimiter="|"), "knows", ["weight"], src_label="person", dst_label="person", src_field="src_id", dst_field="dst_id", ) .add_edges( Loader(os.path.join(prefix, "created.csv"), delimiter="|"), "created", ["weight"], src_label="person", dst_label="software", src_field="src_id", dst_field="dst_id", ) ) return graph
[ "graphscope.framework.graph.Graph", "os.path.join" ]
[((1302, 1332), 'graphscope.framework.graph.Graph', 'Graph', (['sess'], {'directed': 'directed'}), '(sess, directed=directed)\n', (1307, 1332), False, 'from graphscope.framework.graph import Graph\n'), ((2024, 2059), 'os.path.join', 'os.path.join', (['prefix', '"""created.csv"""'], {}), "(prefix, 'created.csv')\n", (2036, 2059), False, 'import os\n'), ((1751, 1784), 'os.path.join', 'os.path.join', (['prefix', '"""knows.csv"""'], {}), "(prefix, 'knows.csv')\n", (1763, 1784), False, 'import os\n'), ((1576, 1612), 'os.path.join', 'os.path.join', (['prefix', '"""software.csv"""'], {}), "(prefix, 'software.csv')\n", (1588, 1612), False, 'import os\n'), ((1394, 1428), 'os.path.join', 'os.path.join', (['prefix', '"""person.csv"""'], {}), "(prefix, 'person.csv')\n", (1406, 1428), False, 'import os\n')]
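A brief usage sketch for the load_modern_graph helper above; the data directory and session options are placeholders, not values taken from the original file:

import graphscope

# Hypothetical local session and data path; adjust to your environment.
sess = graphscope.session(cluster_type="hosts", num_workers=1)
modern = load_modern_graph(sess, prefix="/path/to/modern_graph", directed=False)
print(modern)   # ArrowProperty graph: person/software vertices, knows/created edges
sess.close()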
import networkx.algorithms.tests.test_link_prediction import pytest from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx(networkx.algorithms.tests.test_link_prediction, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.tests.test_link_prediction import TestAdamicAdarIndex from networkx.algorithms.tests.test_link_prediction import TestCNSoundarajanHopcroft from networkx.algorithms.tests.test_link_prediction import TestJaccardCoefficient from networkx.algorithms.tests.test_link_prediction import TestPreferentialAttachment from networkx.algorithms.tests.test_link_prediction import \ TestRAIndexSoundarajanHopcroft from networkx.algorithms.tests.test_link_prediction import TestResourceAllocationIndex from networkx.algorithms.tests.test_link_prediction import TestWithinInterCluster @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestResourceAllocationIndex) class TestResourceAllocationIndex: def test_notimplemented(self): assert pytest.raises(nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestJaccardCoefficient) class TestJaccardCoefficient: def test_notimplemented(self): assert pytest.raises(nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestPreferentialAttachment) class TestPreferentialAttachment: def test_notimplemented(self): assert pytest.raises(nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestAdamicAdarIndex) class TestAdamicAdarIndex: def test_notimplemented(self): assert pytest.raises(nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestCNSoundarajanHopcroft) class TestCNSoundarajanHopcroft: def test_notimplemented(self): G = nx.DiGraph([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestRAIndexSoundarajanHopcroft) class TestRAIndexSoundarajanHopcroft: def test_notimplemented(self): G = nx.DiGraph([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestWithinInterCluster) class TestWithinInterCluster: def test_notimplemented(self): G = nx.DiGraph([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
[ "graphscope.experimental.nx.utils.compat.with_graphscope_nx_context", "pytest.raises", "pytest.mark.usefixtures" ]
[((980, 1025), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1003, 1025), False, 'import pytest\n'), ((1027, 1082), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestResourceAllocationIndex'], {}), '(TestResourceAllocationIndex)\n', (1053, 1082), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1292, 1337), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1315, 1337), False, 'import pytest\n'), ((1339, 1389), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestJaccardCoefficient'], {}), '(TestJaccardCoefficient)\n', (1365, 1389), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1594, 1639), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1617, 1639), False, 'import pytest\n'), ((1641, 1695), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestPreferentialAttachment'], {}), '(TestPreferentialAttachment)\n', (1667, 1695), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1904, 1949), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1927, 1949), False, 'import pytest\n'), ((1951, 1998), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestAdamicAdarIndex'], {}), '(TestAdamicAdarIndex)\n', (1977, 1998), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((2200, 2245), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2223, 2245), False, 'import pytest\n'), ((2247, 2300), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestCNSoundarajanHopcroft'], {}), '(TestCNSoundarajanHopcroft)\n', (2273, 2300), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((2542, 2587), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2565, 2587), False, 'import pytest\n'), ((2589, 2647), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestRAIndexSoundarajanHopcroft'], {}), '(TestRAIndexSoundarajanHopcroft)\n', (2615, 2647), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((2894, 2939), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2917, 2939), False, 'import pytest\n'), ((2941, 2991), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestWithinInterCluster'], {}), '(TestWithinInterCluster)\n', (2967, 2991), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((332, 377), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (355, 377), False, 'import pytest\n'), ((2474, 2538), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'self.func', 'G', '[(0, 2)]'], {}), 
'(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])\n', (2487, 2538), False, 'import pytest\n'), ((2826, 2890), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'self.func', 'G', '[(0, 2)]'], {}), '(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])\n', (2839, 2890), False, 'import pytest\n'), ((3162, 3226), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'self.func', 'G', '[(0, 2)]'], {}), '(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])\n', (3175, 3226), False, 'import pytest\n')]
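The extract_api records above pair each call with character offsets into the original source text of the code column. The field order used below is inferred from these samples and is an assumption, not a documented schema; a minimal check of one record might look like this:

# Sketch: recover the call expression an extract_api record points at.
# Assumed field order: (call_span, qualified_name, name_in_code, (args, kwargs) repr,
# argument_text, argument_span, flag, import_statement) -- inferred, not official.
def call_snippet(source: str, record):
    (start, end), qualified_name, name_in_code, _sig, arg_text, *_rest = record
    snippet = source[start:end]                 # e.g. "queue.Queue()"
    assert snippet.startswith(name_in_code)     # the call is spelled with this name
    assert snippet.endswith(arg_text.strip())   # and carries these literal arguments
    return qualified_name, snippet

record = ((2396, 2409), 'queue.Queue', 'queue.Queue', ([], {}), '()\n',
          (2407, 2409), False, 'import queue\n')
# call_snippet(original_file_text, record)  # offsets refer to the unmodified source file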
import networkx.algorithms.centrality.tests.test_current_flow_betweenness_centrality import pytest from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx( networkx.algorithms.centrality.tests.test_current_flow_betweenness_centrality, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.centrality.tests.test_current_flow_betweenness_centrality import \ TestApproximateFlowBetweennessCentrality @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestApproximateFlowBetweennessCentrality) class TestApproximateFlowBetweennessCentrality: # NB: graphscope.nx does not support grid_graph, pass the test def test_grid(self): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((542, 587), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (565, 587), False, 'import pytest\n'), ((589, 657), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestApproximateFlowBetweennessCentrality'], {}), '(TestApproximateFlowBetweennessCentrality)\n', (615, 657), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((353, 398), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (376, 398), False, 'import pytest\n')]
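The two test modules above follow one pattern: re-import an upstream NetworkX test module against graphscope.nx, then override only the cases that need different behaviour. A distilled sketch — the upstream module and class names here are placeholders for illustration:

import pytest
import networkx.algorithms.tests.test_cluster   # placeholder upstream test module

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

# 1. Re-import the upstream test module so it runs against graphscope.nx,
#    attaching the session fixture to every test it contains.
import_as_graphscope_nx(
    networkx.algorithms.tests.test_cluster,
    decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.tests.test_cluster import TestTriangles   # placeholder class

# 2. Re-declare only the classes whose cases need graphscope-specific overrides.
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestTriangles)
class TestTriangles:
    def test_unsupported_case(self):   # placeholder override
        pass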
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import importlib import logging import os import random import string import sys import time import numpy as np import pytest import graphscope graphscope.set_option(show_log=True) from graphscope import property_sssp from graphscope import sssp from graphscope.framework.app import AppAssets from graphscope.framework.app import AppDAGNode from graphscope.framework.errors import AnalyticalEngineInternalError from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.loader import Loader test_repo_dir = os.path.expandvars("${GS_TEST_DIR}") prefix = os.path.join(test_repo_dir, "ogbn_mag_small") new_property_dir = os.path.join(test_repo_dir, "new_property", "v2_e2") @pytest.fixture(scope="module") def sess(): session = graphscope.session(cluster_type="hosts", num_workers=2, mode="lazy") session.as_default() yield session session.close() @pytest.fixture(scope="function") def student_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return Loader("%s/student.v" % data_dir, header_row=True, delimiter=",") @pytest.fixture(scope="function") def teacher_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return Loader("%s/teacher.v" % data_dir, header_row=True, delimiter=",") @pytest.fixture(scope="function") def student_group_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return Loader("%s/group.e" % data_dir, header_row=True, delimiter=",") @pytest.fixture(scope="function") def teacher_group_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return Loader("%s/teacher_group.e" % data_dir, header_row=True, delimiter=",") def arrow_property_graph(graphscope_session): g = graphscope_session.g(generate_eid=False) g = g.add_vertices(f"{new_property_dir}/twitter_v_0", "v0") g = g.add_vertices(f"{new_property_dir}/twitter_v_1", "v1") g = g.add_edges(f"{new_property_dir}/twitter_e_0_0_0", "e0", ["weight"], "v0", "v0") g = g.add_edges(f"{new_property_dir}/twitter_e_0_1_0", "e0", ["weight"], "v0", "v1") g = g.add_edges(f"{new_property_dir}/twitter_e_1_0_0", "e0", ["weight"], "v1", "v0") g = g.add_edges(f"{new_property_dir}/twitter_e_1_1_0", "e0", ["weight"], "v1", "v1") g = g.add_edges(f"{new_property_dir}/twitter_e_0_0_1", "e1", ["weight"], "v0", "v0") g = g.add_edges(f"{new_property_dir}/twitter_e_0_1_1", "e1", ["weight"], "v0", "v1") g = g.add_edges(f"{new_property_dir}/twitter_e_1_0_1", "e1", ["weight"], "v1", "v0") g = g.add_edges(f"{new_property_dir}/twitter_e_1_1_1", "e1", ["weight"], "v1", "v1") return g def test_vertices_omitted_form_loader(sess, student_group_e): g = sess.g() g1 = g.add_edges(student_group_e) g2 = sess.run(g1) # g2 is a Graph instance assert g2.loaded() def test_construct_graph_step_by_step(sess): _g = sess.g(generate_eid=False) g = sess.run(_g) _g1 = g.add_vertices(f"{new_property_dir}/twitter_v_0", "v0") g1 = sess.run(_g1) _g2 = 
g1.add_vertices(f"{new_property_dir}/twitter_v_1", "v1") g2 = sess.run(_g2) ug = g.unload() ug1 = g1.unload() ug2 = g2.unload() sess.run([ug, ug1, ug2]) def test_unload_graph(sess, student_v, teacher_v, student_group_e): # case 1 # 1. load empty g # 2. unload g g = sess.g() ug = g.unload() assert sess.run(ug) is None # case 2 g = sess.g() g1 = g.add_vertices(student_v, "student") g2 = g.add_vertices(teacher_v, "teacher") ug1 = g1.unload() ug2 = g2.unload() assert sess.run(ug1) is None assert sess.run(ug2) is None # case 3 g = sess.g() g1 = g.add_vertices(student_v, "student") g2 = g1.add_vertices(teacher_v, "teacher") g3 = g2.add_edges( student_group_e, "group", src_label="student", dst_label="student" ) ug = g.unload() ug1 = g1.unload() ug2 = g2.unload() ug3 = g3.unload() sess.run([ug, ug1, ug2, ug3]) # case 4 # test unload twice g = sess.g() ug = g.unload() assert sess.run(ug) is None assert sess.run(ug) is None def test_error_using_unload_graph(sess, student_v): with pytest.raises(AnalyticalEngineInternalError): g = sess.g() ug = g.unload() g1 = g.add_vertices(student_v, "student") sess.run([ug, g1]) def test_unload_app(sess): g = arrow_property_graph(sess) # case 1 a1 = AppDAGNode(g, AppAssets(algo="property_sssp", context="labeled_vertex_data")) ua1 = a1.unload() assert sess.run(ua1) is None # case 2 # unload app twice a1 = AppDAGNode(g, AppAssets(algo="property_sssp", context="labeled_vertex_data")) ua1 = a1.unload() assert sess.run(ua1) is None assert sess.run(ua1) is None # case 3 # load app after unload a1 = AppDAGNode(g, AppAssets(algo="property_sssp", context="labeled_vertex_data")) ua1 = a1.unload() assert sess.run(ua1) is None c1 = a1(src=20) r1 = c1.to_numpy("r:v0.dist_0") r = sess.run(r1) assert r.shape == (40521,) def test_graph_to_numpy(sess): g = arrow_property_graph(sess) c = property_sssp(g, 20) ctx_out_np = c.to_numpy("r:v0.dist_0") g2 = g.add_column(c, {"result_0": "r:v0.dist_0"}) graph_out_np = g2.to_numpy("v:v0.result_0") r = sess.run([ctx_out_np, graph_out_np]) assert np.all(r[0] == r[1]) # unload graph ug = g.unload() ug2 = g2.unload() sess.run([ug, ug2]) def test_graph_to_dataframe(sess): g = arrow_property_graph(sess) c = property_sssp(g, 20) ctx_out_df = c.to_dataframe({"result": "r:v0.dist_0"}) g2 = g.add_column(c, {"result_0": "r:v0.dist_0"}) graph_out_df = g2.to_dataframe({"result": "v:v0.result_0"}) r = sess.run([ctx_out_df, graph_out_df]) assert r[0].equals(r[1]) # unload graph ug = g.unload() ug2 = g2.unload() sess.run([ug, ug2]) def test_context(sess): g = arrow_property_graph(sess) c = property_sssp(g, 20) r1 = c.to_numpy("r:v0.dist_0") r2 = c.to_dataframe({"id": "v:v0.id", "result": "r:v0.dist_0"}) r3 = c.to_vineyard_tensor("v:v0.id") r4 = c.to_vineyard_dataframe( {"id": "v:v0.id", "data": "v:v0.dist", "result": "r:v0.dist_0"} ) r = sess.run([r1, r2, r3, r4]) assert r[0].shape == (40521,) assert r[1].shape == (40521, 2) assert r[2] is not None assert r[3] is not None def test_error_selector_context(sess): # case 1 # labeled vertex data context g = arrow_property_graph(sess) c = property_sssp(g, 20) with pytest.raises( InvalidArgumentError, match="Selector in labeled vertex data context cannot be None", ): r = c.to_numpy(selector=None) with pytest.raises(ValueError, match="not enough values to unpack"): # missing ":" in selectot r = c.to_numpy("r.v0.dist_0") with pytest.raises(SyntaxError, match="Invalid selector"): # must be "v/e/r:xxx" r = c.to_numpy("c:v0.dist_0") with pytest.raises(SyntaxError, match="Invalid selector"): # format error 
c.to_numpy("r:v0.dist_0.dist_1") # case 2 # vertex data context pg = g.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]}) c = sssp(pg, 20) with pytest.raises(SyntaxError, match="Selector of v must be 'v.id' or 'v.data'"): r = c.to_dataframe({"id": "v.ID"}) with pytest.raises(ValueError, match="selector of to_dataframe must be a dict"): r = c.to_dataframe("id") def test_query_after_project(sess): g = arrow_property_graph(sess) pg = g.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]}) # property sssp on property graph # sssp on simple graph c = sssp(pg, 20) r1 = c.to_dataframe({"node": "v.id", "r": "r"}) r = sess.run(r1) assert r.shape == (40521, 2) def test_add_column(sess): g = arrow_property_graph(sess) pg = g.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]}) c = sssp(pg, 20) g1 = g.add_column(c, {"id_col": "v.id", "data_col": "v.data", "result_col": "r"}) sess.run(g1) def test_multi_src_dst_edge_loader( sess, student_group_e, teacher_group_e, student_v, teacher_v ): graph = sess.g() graph = graph.add_vertices( student_v, "student", ["name", "lesson_nums", "avg_score"], "student_id" ) graph = graph.add_vertices( teacher_v, "teacher", ["student_num", "score", "email", "tel"], "teacher_id" ) graph = graph.add_edges( student_group_e, "group", ["group_id", "member_size"], src_label="student", dst_label="student", src_field="leader_student_id", dst_field="member_student_id", ) graph = graph.add_edges( teacher_group_e, "group", ["group_id", "member_size"], src_label="teacher", dst_label="teacher", src_field="leader_teacher_id", dst_field="member_teacher_id", ) g = sess.run(graph) def test_simulate_eager(sess): g1_node = arrow_property_graph(sess) g1 = sess.run(g1_node) c_node = property_sssp(g1, 20) c = sess.run(c_node) r_node = c.to_numpy("r:v0.dist_0") r = sess.run(r_node) assert r.shape == (40521,) pg_node = g1.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]}) pg = sess.run(pg_node) c_node = sssp(pg, 20) c = sess.run(c_node) g2_node = g1.add_column( c, {"id_col": "v.id", "data_col": "v.data", "result_col": "r"} ) g2 = sess.run(g2_node)
[ "os.path.expandvars", "os.path.join", "graphscope.framework.app.AppAssets", "graphscope.session", "graphscope.framework.loader.Loader", "graphscope.set_option", "graphscope.property_sssp", "graphscope.sssp", "pytest.raises", "pytest.fixture", "numpy.all" ]
[((814, 850), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (835, 850), False, 'import graphscope\n'), ((1207, 1243), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (1225, 1243), False, 'import os\n'), ((1253, 1298), 'os.path.join', 'os.path.join', (['test_repo_dir', '"""ogbn_mag_small"""'], {}), "(test_repo_dir, 'ogbn_mag_small')\n", (1265, 1298), False, 'import os\n'), ((1319, 1371), 'os.path.join', 'os.path.join', (['test_repo_dir', '"""new_property"""', '"""v2_e2"""'], {}), "(test_repo_dir, 'new_property', 'v2_e2')\n", (1331, 1371), False, 'import os\n'), ((1375, 1405), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1389, 1405), False, 'import pytest\n'), ((1567, 1599), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1581, 1599), False, 'import pytest\n'), ((1757, 1789), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1771, 1789), False, 'import pytest\n'), ((1947, 1979), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1961, 1979), False, 'import pytest\n'), ((2141, 2173), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2155, 2173), False, 'import pytest\n'), ((1432, 1500), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(2)', 'mode': '"""lazy"""'}), "(cluster_type='hosts', num_workers=2, mode='lazy')\n", (1450, 1500), False, 'import graphscope\n'), ((1623, 1674), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1641, 1674), False, 'import os\n'), ((1688, 1753), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/student.v' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/student.v' % data_dir, header_row=True, delimiter=',')\n", (1694, 1753), False, 'from graphscope.framework.loader import Loader\n'), ((1813, 1864), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1831, 1864), False, 'import os\n'), ((1878, 1943), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/teacher.v' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/teacher.v' % data_dir, header_row=True, delimiter=',')\n", (1884, 1943), False, 'from graphscope.framework.loader import Loader\n'), ((2009, 2060), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2027, 2060), False, 'import os\n'), ((2074, 2137), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/group.e' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/group.e' % data_dir, header_row=True, delimiter=',')\n", (2080, 2137), False, 'from graphscope.framework.loader import Loader\n'), ((2203, 2254), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2221, 2254), False, 'import os\n'), ((2268, 2339), 'graphscope.framework.loader.Loader', 'Loader', (["('%s/teacher_group.e' % data_dir)"], {'header_row': '(True)', 'delimiter': '""","""'}), "('%s/teacher_group.e' % data_dir, header_row=True, delimiter=',')\n", (2274, 2339), False, 'from graphscope.framework.loader import Loader\n'), 
((5799, 5819), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (5812, 5819), False, 'from graphscope import property_sssp\n'), ((6021, 6041), 'numpy.all', 'np.all', (['(r[0] == r[1])'], {}), '(r[0] == r[1])\n', (6027, 6041), True, 'import numpy as np\n'), ((6207, 6227), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (6220, 6227), False, 'from graphscope import property_sssp\n'), ((6633, 6653), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (6646, 6653), False, 'from graphscope import property_sssp\n'), ((7202, 7222), 'graphscope.property_sssp', 'property_sssp', (['g', '(20)'], {}), '(g, 20)\n', (7215, 7222), False, 'from graphscope import property_sssp\n'), ((7915, 7927), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (7919, 7927), False, 'from graphscope import sssp\n'), ((8392, 8404), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (8396, 8404), False, 'from graphscope import sssp\n'), ((8653, 8665), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (8657, 8665), False, 'from graphscope import sssp\n'), ((9776, 9797), 'graphscope.property_sssp', 'property_sssp', (['g1', '(20)'], {}), '(g1, 20)\n', (9789, 9797), False, 'from graphscope import property_sssp\n'), ((10034, 10046), 'graphscope.sssp', 'sssp', (['pg', '(20)'], {}), '(pg, 20)\n', (10038, 10046), False, 'from graphscope import sssp\n'), ((4831, 4875), 'pytest.raises', 'pytest.raises', (['AnalyticalEngineInternalError'], {}), '(AnalyticalEngineInternalError)\n', (4844, 4875), False, 'import pytest\n'), ((5100, 5162), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (5109, 5162), False, 'from graphscope.framework.app import AppAssets\n'), ((5279, 5341), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (5288, 5341), False, 'from graphscope.framework.app import AppAssets\n'), ((5496, 5558), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (5505, 5558), False, 'from graphscope.framework.app import AppAssets\n'), ((7232, 7336), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""Selector in labeled vertex data context cannot be None"""'}), "(InvalidArgumentError, match=\n 'Selector in labeled vertex data context cannot be None')\n", (7245, 7336), False, 'import pytest\n'), ((7403, 7465), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""not enough values to unpack"""'}), "(ValueError, match='not enough values to unpack')\n", (7416, 7465), False, 'import pytest\n'), ((7548, 7600), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Invalid selector"""'}), "(SyntaxError, match='Invalid selector')\n", (7561, 7600), False, 'import pytest\n'), ((7679, 7731), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Invalid selector"""'}), "(SyntaxError, match='Invalid selector')\n", (7692, 7731), False, 'import pytest\n'), ((7937, 8013), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Selector of v must be \'v.id\' or \'v.data\'"""'}), '(SyntaxError, match="Selector of v must be \'v.id\' or \'v.data\'")\n', (7950, 
8013), False, 'import pytest\n'), ((8067, 8141), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""selector of to_dataframe must be a dict"""'}), "(ValueError, match='selector of to_dataframe must be a dict')\n", (8080, 8141), False, 'import pytest\n')]
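The tests above all exercise a lazy-mode session, in which every call only builds a DAG node until sess.run is issued. A compressed sketch of that flow, relying on the sess fixture and the arrow_property_graph helper defined in the test file above:

# Sketch of the lazy-evaluation flow exercised by the tests above.
g_node = arrow_property_graph(sess)            # only a DAG node; nothing is loaded yet
g = sess.run(g_node)                           # the graph is materialized here
ctx_node = property_sssp(g, 20)                # app invocation is again just a node
ctx = sess.run(ctx_node)
dist = sess.run(ctx.to_numpy("r:v0.dist_0"))   # selectors are resolved at run time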
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from enum import Enum from typing import Any from typing import NamedTuple import networkx.readwrite.gml from graphscope.nx.utils.compat import import_as_graphscope_nx import_as_graphscope_nx(networkx.readwrite.gml) class Pattern(Enum): """ encodes the index of each token-matching pattern in `tokenize`. """ KEYS = 0 REALS = 1 INTS = 2 STRINGS = 3 DICT_START = 4 DICT_END = 5 COMMENT_WHITESPACE = 6 class Token(NamedTuple): category: Pattern value: Any line: int position: int
[ "graphscope.nx.utils.compat.import_as_graphscope_nx" ]
[((838, 885), 'graphscope.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.readwrite.gml'], {}), '(networkx.readwrite.gml)\n', (861, 885), False, 'from graphscope.nx.utils.compat import import_as_graphscope_nx\n')]
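A small illustration of the tokenizer types declared above; the concrete values are made up:

# Each lexeme from the GML tokenizer is tagged with its pattern class and location.
tok = Token(category=Pattern.INTS, value=42, line=3, position=17)   # hypothetical token
assert tok.category is Pattern.INTS and tok.value == 42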
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from itertools import chain from typing import Any from typing import Dict from typing import Mapping from typing import Sequence from typing import Tuple from typing import Union import numpy as np import pandas as pd from graphscope.client.session import get_default_session from graphscope.framework import dag_utils from graphscope.framework import utils from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.errors import check_argument from graphscope.framework.graph import Graph from graphscope.framework.loader import Loader from graphscope.framework.vineyard_object import VineyardObject from graphscope.proto import attr_value_pb2 from graphscope.proto import types_pb2 __all__ = ["load_from"] LoaderVariants = Union[Loader, str, Sequence[np.ndarray], pd.DataFrame, VineyardObject] class VertexLabel(object): """Holds meta informations about a single vertex label.""" def __init__( self, label: str, loader: Any, properties: Sequence = None, vid: Union[str, int] = 0, ): self.label = label if isinstance(loader, Loader): self.loader = loader else: self.loader = Loader(loader) self.raw_properties = properties self.properties = [] self.vid = vid def finish(self, id_type: str = "int64_t"): # Normalize properties # Add vid to property list self.add_property(str(self.vid), id_type) if self.raw_properties: self.add_properties(self.raw_properties) elif self.loader.deduced_properties: self.add_properties(self.loader.deduced_properties) self.loader.select_columns( self.properties, include_all=bool(not self.raw_properties) ) def __str__(self) -> str: s = "\ntype: VertexLabel" s += "\nlabel: " + self.label s += "\nproperties: " + str(self.properties) s += "\nvid: " + str(self.vid) s += "\nloader: " + repr(self.loader) return s def __repr__(self) -> str: return self.__str__() def add_property(self, prop: str, dtype=None) -> None: """prop is a str, representing name. It can optionally have a type.""" self.properties.append((prop, utils.unify_type(dtype))) def add_properties(self, properties: Sequence) -> None: for prop in properties: if isinstance(prop, str): self.add_property(prop) else: self.add_property(prop[0], prop[1]) class EdgeSubLabel(object): """Hold meta informations of a single relationship. i.e. 
src_label -> edge_label -> dst_label """ def __init__( self, loader, properties=None, source=None, destination=None, load_strategy="both_out_in", ): if isinstance(loader, Loader): self.loader = loader else: self.loader = Loader(loader) self.raw_properties = properties self.properties = [] self.source_vid = 0 self.source_label = "" self.destination_vid = 1 self.destination_label = "" self.load_strategy = "" if source is not None: self.set_source(source) if destination is not None: self.set_destination(destination) if ( isinstance(self.source_vid, int) and isinstance(self.destination_vid, str) ) or ( isinstance(self.source_vid, str) and isinstance(self.destination_vid, int) ): raise SyntaxError( "Source vid and destination vid must have same formats, both use name or both use index" ) self.set_load_strategy(load_strategy) def finish(self, id_type: str): self.add_property(str(self.source_vid), id_type) self.add_property(str(self.destination_vid), id_type) if self.raw_properties: self.add_properties(self.raw_properties) elif self.loader.deduced_properties: self.add_properties(self.loader.deduced_properties) self.loader.select_columns( self.properties, include_all=bool(not self.raw_properties) ) def __str__(self) -> str: s = "\ntype: EdgeSubLabel" s += "\nsource_label: " + self.source_label s += "\ndestination_label: " + self.destination_label s += "\nproperties: " + str(self.properties) s += "\nloader: " + repr(self.loader) return s def __repr__(self) -> str: return self.__str__() @staticmethod def resolve_src_dst_value(value: Union[int, str, Tuple[Union[int, str], str]]): """Resolve the edge's source and destination. Args: value (Union[int, str, Tuple[Union[int, str], str]]): 1. a int, represent vid id. a str, represent vid name 2. a ([int/str], str). former represents vid, latter represents label Raises: SyntaxError: If the format is incorrect. """ if isinstance(value, (int, str)): check_argument( isinstance(value, int) or (isinstance(value, str) and not value.isdecimal()), "Column name cannot be decimal", ) return value, "" elif isinstance(value, Sequence): check_argument(len(value) == 2) check_argument( isinstance(value[0], int) or (isinstance(value[0], str) and not value[0].isdecimal()), "Column name cannot be decimal", ) check_argument(isinstance(value[1], str), "Label must be str") return value[0], value[1] else: raise InvalidArgumentError( "Source / destination format incorrect. Expect vid or [vid, source_label]" ) def set_source(self, source: Union[int, str, Tuple[Union[int, str], str]]): self.source_vid, self.source_label = self.resolve_src_dst_value(source) def set_destination( self, destination: Union[int, str, Tuple[Union[int, str], str]] ): self.destination_vid, self.destination_label = self.resolve_src_dst_value( destination ) def set_load_strategy(self, strategy: str): check_argument( strategy in ("only_out", "only_in", "both_out_in"), "invalid load strategy: " + strategy, ) self.load_strategy = strategy def add_property(self, prop: str, dtype=None) -> None: """prop is a str, representing name. 
It can optionally have a type.""" self.properties.append((prop, utils.unify_type(dtype))) def add_properties(self, properties: Sequence) -> None: for prop in properties: if isinstance(prop, str): self.add_property(prop) else: self.add_property(prop[0], prop[1]) def get_attr(self): attr_list = attr_value_pb2.NameAttrList() attr_list.name = "{}_{}".format(self.source_label, self.destination_label) attr_list.attr[types_pb2.SRC_LABEL].CopyFrom(utils.s_to_attr(self.source_label)) attr_list.attr[types_pb2.DST_LABEL].CopyFrom( utils.s_to_attr(self.destination_label) ) attr_list.attr[types_pb2.LOAD_STRATEGY].CopyFrom( utils.s_to_attr(self.load_strategy) ) attr_list.attr[types_pb2.SRC_VID].CopyFrom( utils.s_to_attr(str(self.source_vid)) ) attr_list.attr[types_pb2.DST_VID].CopyFrom( utils.s_to_attr(str(self.destination_vid)) ) attr_list.attr[types_pb2.LOADER].CopyFrom(self.loader.get_attr()) props = [] for prop in self.properties[2:]: prop_attr = attr_value_pb2.NameAttrList() prop_attr.name = prop[0] prop_attr.attr[0].CopyFrom(utils.type_to_attr(prop[1])) props.append(prop_attr) attr_list.attr[types_pb2.PROPERTIES].list.func.extend(props) return attr_list class EdgeLabel(object): """Hold meta informations of an edge label. An Edge label may be consist of a few `EdgeSubLabel`s. i.e. src_label1 -> edge_label -> dst_label1 src_label2 -> edge_label -> dst_label2 src_label3 -> edge_label -> dst_label3 """ def __init__(self, label: str): self.label = label self.sub_labels = [] self._finished = False def __str__(self): s = "\ntype: EdgeLabel" s += "\nlabel: " + self.label s += "\nsub_labels: " for sub_label in self.sub_labels: s += "\n" s += str(sub_label) return s def __repr__(self): return self.__str__() def add_sub_label(self, sub_label): self.sub_labels.append(sub_label) def finish(self, id_type: str = "int64_t"): for sub_label in self.sub_labels: sub_label.finish(id_type) def process_vertex(vertex: VertexLabel) -> attr_value_pb2.NameAttrList: attr_list = attr_value_pb2.NameAttrList() attr_list.name = "vertex" attr_list.attr[types_pb2.LABEL].CopyFrom(utils.s_to_attr(vertex.label)) attr_list.attr[types_pb2.VID].CopyFrom(utils.s_to_attr(str(vertex.vid))) props = [] for prop in vertex.properties[1:]: prop_attr = attr_value_pb2.NameAttrList() prop_attr.name = prop[0] prop_attr.attr[0].CopyFrom(utils.type_to_attr(prop[1])) props.append(prop_attr) attr_list.attr[types_pb2.PROPERTIES].list.func.extend(props) attr_list.attr[types_pb2.LOADER].CopyFrom(vertex.loader.get_attr()) return attr_list def process_edge(edge: EdgeLabel) -> attr_value_pb2.NameAttrList: attr_list = attr_value_pb2.NameAttrList() attr_list.name = "edge" attr_list.attr[types_pb2.LABEL].CopyFrom(utils.s_to_attr(edge.label)) sub_label_attr = [sub_label.get_attr() for sub_label in edge.sub_labels] attr_list.attr[types_pb2.SUB_LABEL].list.func.extend(sub_label_attr) return attr_list def _sanity_check(edges: Sequence[EdgeLabel], vertices: Sequence[VertexLabel]): vertex_labels = [] for v in vertices: vertex_labels.append(v.label) if not vertex_labels: vertex_labels.append("_") for edge in edges: # Check source label and destination label check_argument(len(edge.sub_labels) != 0, "Edge label is malformed.") for sub_label in edge.sub_labels: if sub_label.source_label or sub_label.destination_label: if not (sub_label.source_label and sub_label.destination_label): raise RuntimeError( "source label and destination label must be both specified or either unspecified" ) # Handle default label. If edge doesn't specify label, then use default. 
if not sub_label.source_label and not sub_label.destination_label: check_argument(len(vertex_labels) == 1, "ambiguous vertex label") if len(vertex_labels) == 1: sub_label.source_label = ( sub_label.destination_label ) = vertex_labels[0] if vertices is not None and len(vertices) > 0: check_argument( sub_label.source_label in vertex_labels, "source label not found in vertex labels", ) check_argument( sub_label.destination_label in vertex_labels, "destination label not found in vertex labels", ) check_argument( sub_label.source_vid != sub_label.destination_vid, "source col and destination col cannot refer to the same col", ) return edges, vertices def _get_config( edges: Sequence[EdgeLabel], vertices: Sequence[VertexLabel], directed: bool, oid_type: str, generate_eid: bool, ) -> Dict: config = {} attr = attr_value_pb2.AttrValue() for label in chain(edges, vertices): label.finish(oid_type) for edge in edges: attr.list.func.extend([process_edge(edge)]) attr.list.func.extend([process_vertex(vertex) for vertex in vertices]) directed_attr = utils.b_to_attr(directed) generate_eid_attr = utils.b_to_attr(generate_eid) config[types_pb2.ARROW_PROPERTY_DEFINITION] = attr config[types_pb2.DIRECTED] = directed_attr config[types_pb2.OID_TYPE] = utils.s_to_attr(oid_type) config[types_pb2.GENERATE_EID] = generate_eid_attr # vid_type is fixed config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t") config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(False) return config def normalize_parameter_edges( edges: Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants ] ): """Normalize parameters user passed in. Since parameters are very flexible, we need to be careful about it. Args: edges (Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants ]): Edges definition. """ def process_sub_label(items): if isinstance(items, (Loader, str, pd.DataFrame, VineyardObject)): return EdgeSubLabel(items, properties=None, source=None, destination=None) elif isinstance(items, Sequence): if all([isinstance(item, np.ndarray) for item in items]): return EdgeSubLabel( loader=items, properties=None, source=None, destination=None ) else: check_argument(len(items) < 6, "Too many arguments for a edge label") return EdgeSubLabel(*items) elif isinstance(items, Mapping): return EdgeSubLabel(**items) else: raise SyntaxError("Wrong format of e sub label: " + str(items)) def process_label(label, items): e_label = EdgeLabel(label) if isinstance(items, (Loader, str, pd.DataFrame, VineyardObject)): e_label.add_sub_label(process_sub_label(items)) elif isinstance(items, Sequence): if isinstance( items[0], (Loader, str, pd.DataFrame, VineyardObject, np.ndarray) ): e_label.add_sub_label(process_sub_label(items)) else: for item in items: e_label.add_sub_label(process_sub_label(item)) elif isinstance(items, Mapping): e_label.add_sub_label(process_sub_label(items)) else: raise SyntaxError("Wrong format of e label: " + str(items)) return e_label e_labels = [] if edges is None: raise ValueError("Edges should be None") if isinstance(edges, Mapping): for label, attr in edges.items(): e_labels.append(process_label(label, attr)) else: e_labels.append(process_label("_", edges)) return e_labels def normalize_parameter_vertices( vertices: Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants, None, ] ): """Normalize parameters user passed in. Since parameters are very flexible, we need to be careful about it. 
Args: vertices (Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants, None, ]): Vertices definition. """ def process_label(label, items): if isinstance(items, (Loader, str, pd.DataFrame, VineyardObject)): return VertexLabel(label=label, loader=items) elif isinstance(items, Sequence): if all([isinstance(item, np.ndarray) for item in items]): return VertexLabel(label=label, loader=items) else: check_argument(len(items) < 4, "Too many arguments for a vertex label") return VertexLabel(label, *items) elif isinstance(items, Mapping): return VertexLabel(label, **items) else: raise RuntimeError("Wrong format of v label: " + str(items)) v_labels = [] if vertices is None: return v_labels if isinstance(vertices, Mapping): for label, attr in vertices.items(): v_labels.append(process_label(label, attr)) else: v_labels.append(process_label("_", vertices)) return v_labels def load_from( edges: Union[ Mapping[str, Union[LoaderVariants, Sequence, Mapping]], LoaderVariants, Sequence ], vertices: Union[ Mapping[str, Union[LoaderVariants, Sequence, Mapping]], LoaderVariants, Sequence, None, ] = None, directed=True, oid_type="int64_t", generate_eid=True, ) -> Graph: """Load a Arrow property graph using a list of vertex/edge specifications. - Use Dict of tuples to setup a graph. We can use a dict to set vertex and edge configurations, which can be used to build graphs. Examples: .. code:: ipython g = graphscope_session.load_from( edges={ "group": [ ( "file:///home/admin/group.e", ["group_id", "member_size"], ("leader_student_id", "student"), ("member_student_id", "student"), ), ( "file:///home/admin/group_for_teacher_student.e", ["group_id", "group_name", "establish_date"], ("teacher_in_charge_id", "teacher"), ("member_student_id", "student"), ), ] }, vertices={ "student": ( "file:///home/admin/student.v", ["name", "lesson_nums", "avg_score"], "student_id", ), "teacher": ( "file:///home/admin/teacher.v", ["name", "salary", "age"], "teacher_id", ), }, ) 'e' is the label of edges, and 'v' is the label for vertices, edges are stored in the 'both_in_out' format edges with label 'e' linking from 'v' to 'v'. - Use Dict of dict to setup a graph. We can also give each element inside the tuple a meaningful name, makes it more understandable. Examples: .. code:: ipython g = graphscope_session.load_from( edges={ "group": [ { "loader": "file:///home/admin/group.e", "properties": ["group_id", "member_size"], "source": ("leader_student_id", "student"), "destination": ("member_student_id", "student"), }, { "loader": "file:///home/admin/group_for_teacher_student.e", "properties": ["group_id", "group_name", "establish_date"], "source": ("teacher_in_charge_id", "teacher"), "destination": ("member_student_id", "student"), }, ] }, vertices={ "student": { "loader": "file:///home/admin/student.v", "properties": ["name", "lesson_nums", "avg_score"], "vid": "student_id", }, "teacher": { "loader": "file:///home/admin/teacher.v", "properties": ["name", "salary", "age"], "vid": "teacher_id", }, }, ) Args: edges: Edge configuration of the graph vertices (optional): Vertices configurations of the graph. Defaults to None. If None, we assume all edge's src_label and dst_label are deduced and unambiguous. directed (bool, optional): Indicate whether the graph should be treated as directed or undirected. oid_type (str, optional): ID type of graph. Can be "int64_t" or "string". Defaults to "int64_t". generate_eid (bool, optional): Whether to generate a unique edge id for each edge. 
Generated eid will be placed in third column. This feature is for cooperating with interactive engine. If you only need to work with analytical engine, set it to False. Defaults to False. """ # Don't import the :code:`nx` in top-level statments to improve the # performance of :code:`import graphscope`. from graphscope.experimental import nx sess = get_default_session() if sess is None: raise ValueError("No default session found.") if isinstance(edges, (Graph, nx.Graph, VineyardObject)): return Graph(sess.session_id, edges) oid_type = utils.normalize_data_type_str(oid_type) e_labels = normalize_parameter_edges(edges) v_labels = normalize_parameter_vertices(vertices) e_labels, v_labels = _sanity_check(e_labels, v_labels) config = _get_config(e_labels, v_labels, directed, oid_type, generate_eid) op = dag_utils.create_graph(sess.session_id, types_pb2.ARROW_PROPERTY, attrs=config) graph_def = sess.run(op) graph = Graph(sess.session_id, graph_def) return graph g = load_from
[ "itertools.chain", "graphscope.framework.utils.s_to_attr", "graphscope.client.session.get_default_session", "graphscope.framework.errors.InvalidArgumentError", "graphscope.proto.attr_value_pb2.AttrValue", "graphscope.framework.utils.b_to_attr", "graphscope.framework.utils.normalize_data_type_str", "graphscope.framework.utils.type_to_attr", "graphscope.framework.utils.unify_type", "graphscope.framework.dag_utils.create_graph", "graphscope.framework.graph.Graph", "graphscope.framework.loader.Loader", "graphscope.framework.errors.check_argument", "graphscope.proto.attr_value_pb2.NameAttrList" ]
[((9787, 9816), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (9814, 9816), False, 'from graphscope.proto import attr_value_pb2\n'), ((10479, 10508), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (10506, 10508), False, 'from graphscope.proto import attr_value_pb2\n'), ((12777, 12803), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {}), '()\n', (12801, 12803), False, 'from graphscope.proto import attr_value_pb2\n'), ((12822, 12844), 'itertools.chain', 'chain', (['edges', 'vertices'], {}), '(edges, vertices)\n', (12827, 12844), False, 'from itertools import chain\n'), ((13050, 13075), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['directed'], {}), '(directed)\n', (13065, 13075), False, 'from graphscope.framework import utils\n'), ((13100, 13129), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['generate_eid'], {}), '(generate_eid)\n', (13115, 13129), False, 'from graphscope.framework import utils\n'), ((13265, 13290), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['oid_type'], {}), '(oid_type)\n', (13280, 13290), False, 'from graphscope.framework import utils\n'), ((13403, 13430), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (13418, 13430), False, 'from graphscope.framework import utils\n'), ((13475, 13497), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(False)'], {}), '(False)\n', (13490, 13497), False, 'from graphscope.framework import utils\n'), ((21800, 21821), 'graphscope.client.session.get_default_session', 'get_default_session', ([], {}), '()\n', (21819, 21821), False, 'from graphscope.client.session import get_default_session\n'), ((22018, 22057), 'graphscope.framework.utils.normalize_data_type_str', 'utils.normalize_data_type_str', (['oid_type'], {}), '(oid_type)\n', (22047, 22057), False, 'from graphscope.framework import utils\n'), ((22307, 22386), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['sess.session_id', 'types_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(sess.session_id, types_pb2.ARROW_PROPERTY, attrs=config)\n', (22329, 22386), False, 'from graphscope.framework import dag_utils\n'), ((22428, 22461), 'graphscope.framework.graph.Graph', 'Graph', (['sess.session_id', 'graph_def'], {}), '(sess.session_id, graph_def)\n', (22433, 22461), False, 'from graphscope.framework.graph import Graph\n'), ((7026, 7135), 'graphscope.framework.errors.check_argument', 'check_argument', (["(strategy in ('only_out', 'only_in', 'both_out_in'))", "('invalid load strategy: ' + strategy)"], {}), "(strategy in ('only_out', 'only_in', 'both_out_in'), \n 'invalid load strategy: ' + strategy)\n", (7040, 7135), False, 'from graphscope.framework.errors import check_argument\n'), ((7693, 7722), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (7720, 7722), False, 'from graphscope.proto import attr_value_pb2\n'), ((9893, 9922), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex.label'], {}), '(vertex.label)\n', (9908, 9922), False, 'from graphscope.framework import utils\n'), ((10077, 10106), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (10104, 10106), False, 'from graphscope.proto import attr_value_pb2\n'), ((10583, 10610), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['edge.label'], {}), 
'(edge.label)\n', (10598, 10610), False, 'from graphscope.framework import utils\n'), ((21973, 22002), 'graphscope.framework.graph.Graph', 'Graph', (['sess.session_id', 'edges'], {}), '(sess.session_id, edges)\n', (21978, 22002), False, 'from graphscope.framework.graph import Graph\n'), ((1880, 1894), 'graphscope.framework.loader.Loader', 'Loader', (['loader'], {}), '(loader)\n', (1886, 1894), False, 'from graphscope.framework.loader import Loader\n'), ((3646, 3660), 'graphscope.framework.loader.Loader', 'Loader', (['loader'], {}), '(loader)\n', (3652, 3660), False, 'from graphscope.framework.loader import Loader\n'), ((7859, 7893), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.source_label'], {}), '(self.source_label)\n', (7874, 7893), False, 'from graphscope.framework import utils\n'), ((7961, 8000), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.destination_label'], {}), '(self.destination_label)\n', (7976, 8000), False, 'from graphscope.framework import utils\n'), ((8081, 8116), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.load_strategy'], {}), '(self.load_strategy)\n', (8096, 8116), False, 'from graphscope.framework import utils\n'), ((8516, 8545), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (8543, 8545), False, 'from graphscope.proto import attr_value_pb2\n'), ((10175, 10202), 'graphscope.framework.utils.type_to_attr', 'utils.type_to_attr', (['prop[1]'], {}), '(prop[1])\n', (10193, 10202), False, 'from graphscope.framework import utils\n'), ((12384, 12516), 'graphscope.framework.errors.check_argument', 'check_argument', (['(sub_label.source_vid != sub_label.destination_vid)', '"""source col and destination col cannot refer to the same col"""'], {}), "(sub_label.source_vid != sub_label.destination_vid,\n 'source col and destination col cannot refer to the same col')\n", (12398, 12516), False, 'from graphscope.framework.errors import check_argument\n'), ((2962, 2985), 'graphscope.framework.utils.unify_type', 'utils.unify_type', (['dtype'], {}), '(dtype)\n', (2978, 2985), False, 'from graphscope.framework import utils\n'), ((6459, 6560), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Source / destination format incorrect. Expect vid or [vid, source_label]"""'], {}), "(\n 'Source / destination format incorrect. 
Expect vid or [vid, source_label]')\n", (6479, 6560), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((7381, 7404), 'graphscope.framework.utils.unify_type', 'utils.unify_type', (['dtype'], {}), '(dtype)\n', (7397, 7404), False, 'from graphscope.framework import utils\n'), ((8622, 8649), 'graphscope.framework.utils.type_to_attr', 'utils.type_to_attr', (['prop[1]'], {}), '(prop[1])\n', (8640, 8649), False, 'from graphscope.framework import utils\n'), ((12030, 12132), 'graphscope.framework.errors.check_argument', 'check_argument', (['(sub_label.source_label in vertex_labels)', '"""source label not found in vertex labels"""'], {}), "(sub_label.source_label in vertex_labels,\n 'source label not found in vertex labels')\n", (12044, 12132), False, 'from graphscope.framework.errors import check_argument\n'), ((12204, 12316), 'graphscope.framework.errors.check_argument', 'check_argument', (['(sub_label.destination_label in vertex_labels)', '"""destination label not found in vertex labels"""'], {}), "(sub_label.destination_label in vertex_labels,\n 'destination label not found in vertex labels')\n", (12218, 12316), False, 'from graphscope.framework.errors import check_argument\n')]
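Each entry in the extract_api blob above repeats the same eight-element layout. The unpacking below uses one entry copied verbatim from this row; the field names are my own reading of that layout (they are not documented anywhere in this dump), so treat them as assumptions rather than the extractor's official schema.

# One extract_api entry copied verbatim from the row above. The variable
# names in the unpacking are inferred from the layout, not documented.
entry = ((13050, 13075), 'graphscope.framework.utils.b_to_attr',
         'utils.b_to_attr', (['directed'], {}), '(directed)\n',
         (13065, 13075), False,
         'from graphscope.framework import utils\n')

(call_span,    # (start, end) character offsets of the call as written in `code`
 full_api,     # fully qualified API name
 call_text,    # callee expression as it appears in the source
 call_args,    # (positional-argument reprs, keyword-argument reprs)
 call_tail,    # argument text following the callee, with a trailing newline
 args_span,    # offsets of the parenthesized argument list
 flag,         # boolean whose meaning is not documented in this dump
 import_stmt,  # import statement that brings the callee into scope
 ) = entry

print(full_api, call_span, import_stmt.strip())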
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Manage sessions to the GraphScope coordinator. """ import atexit import base64 import contextlib import copy import json import logging import os import random import sys import threading import time import warnings from queue import Empty as EmptyQueue try: from kubernetes import config as kube_config except ImportError: kube_config = None import graphscope from graphscope.client.rpc import GRPCClient from graphscope.client.utils import CaptureKeyboardInterrupt from graphscope.client.utils import GSLogger from graphscope.client.utils import set_defaults from graphscope.config import GSConfig as gs_config from graphscope.deploy.hosts.cluster import HostsClusterLauncher from graphscope.deploy.kubernetes.cluster import KubernetesClusterLauncher from graphscope.framework.errors import ConnectionError from graphscope.framework.errors import FatalError from graphscope.framework.errors import GRPCError from graphscope.framework.errors import InteractiveEngineInternalError from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.errors import K8sError from graphscope.framework.errors import LearningEngineInternalError from graphscope.framework.errors import check_argument from graphscope.framework.graph import Graph from graphscope.framework.operation import Operation from graphscope.interactive.query import InteractiveQuery from graphscope.interactive.query import InteractiveQueryStatus from graphscope.proto import message_pb2 from graphscope.proto import op_def_pb2 from graphscope.proto import types_pb2 DEFAULT_CONFIG_FILE = os.environ.get( "GS_CONFIG_PATH", os.path.expanduser("~/.graphscope/session.json") ) _session_dict = {} logger = logging.getLogger("graphscope") class Session(object): """A class for interacting with GraphScope graph computation service cluster. A :class:`Session` object encapsulates the environment in which :class:`Operation` objects are executed/evaluated. A session may own resources. It is important to release these resources when they are no longer required. To do this, invoke the :meth:`close` method on the session. A Session can register itself as default session with :meth:`as_default`, and all operations after that will use the default session. Session deregister itself as a default session when closed. The following example demonstrates its usage: .. 
code:: python >>> import graphscope as gs >>> # use session object explicitly >>> sess = gs.session() >>> g = sess.g() >>> pg = g.project(vertices={'v': []}, edges={'e': ['dist']}) >>> r = s.sssp(g, 4) >>> s.close() >>> # or use a session as default >>> s = gs.session().as_default() >>> g = g() >>> pg = g.project(vertices={'v': []}, edges={'e': ['dist']}) >>> r = gs.sssp(pg, 4) >>> s.close() We support setup a service cluster and create a RPC session in following ways: - GraphScope graph computation service run in cluster managed by kubernetes. >>> s = graphscope.session() Also, :class:`Session` provides several keyword params for users to define the cluster. You may use the param :code:`k8s_gs_image` to specify the image for all engine pod, and param :code:`k8s_engine_cpu` or :code:`k8s_engine_mem` to specify the resources. More, you can find all params detail in :meth:`__init__` method. >>> s = graphscope.session( ... k8s_gs_image="registry.cn-hongkong.aliyuncs.com/graphscope/graphscope:latest", ... k8s_vineyard_cpu=0.1, ... k8s_vineyard_mem="256Mi", ... k8s_vineyard_shared_mem="4Gi", ... k8s_engine_cpu=0.1, ... k8s_engine_mem="256Mi") - or all params can be provided by a json configuration file or configuration dict. >>> s = graphscope.session(config='/tmp/config.json') >>> # Or >>> s = graphscope.session(config={'k8s_engine_cpu': 5, 'k8s_engine_mem': '5Gi'}) """ @set_defaults(gs_config) def __init__( self, config=None, cluster_type=gs_config.cluster_type, addr=gs_config.addr, num_workers=gs_config.num_workers, preemptive=gs_config.preemptive, k8s_namespace=gs_config.k8s_namespace, k8s_service_type=gs_config.k8s_service_type, k8s_gs_image=gs_config.k8s_gs_image, k8s_etcd_image=gs_config.k8s_etcd_image, k8s_gie_graph_manager_image=gs_config.k8s_gie_graph_manager_image, k8s_zookeeper_image=gs_config.k8s_zookeeper_image, k8s_image_pull_policy=gs_config.k8s_image_pull_policy, k8s_image_pull_secrets=gs_config.k8s_image_pull_secrets, k8s_coordinator_cpu=gs_config.k8s_coordinator_cpu, k8s_coordinator_mem=gs_config.k8s_coordinator_mem, k8s_etcd_num_pods=gs_config.k8s_etcd_num_pods, k8s_etcd_cpu=gs_config.k8s_etcd_cpu, k8s_etcd_mem=gs_config.k8s_etcd_mem, k8s_zookeeper_cpu=gs_config.k8s_zookeeper_cpu, k8s_zookeeper_mem=gs_config.k8s_zookeeper_mem, k8s_gie_graph_manager_cpu=gs_config.k8s_gie_graph_manager_cpu, k8s_gie_graph_manager_mem=gs_config.k8s_gie_graph_manager_mem, k8s_vineyard_daemonset=gs_config.k8s_vineyard_daemonset, k8s_vineyard_cpu=gs_config.k8s_vineyard_cpu, k8s_vineyard_mem=gs_config.k8s_vineyard_mem, k8s_vineyard_shared_mem=gs_config.k8s_vineyard_shared_mem, k8s_engine_cpu=gs_config.k8s_engine_cpu, k8s_engine_mem=gs_config.k8s_engine_mem, k8s_mars_worker_cpu=gs_config.mars_worker_cpu, k8s_mars_worker_mem=gs_config.mars_worker_mem, k8s_mars_scheduler_cpu=gs_config.mars_scheduler_cpu, k8s_mars_scheduler_mem=gs_config.mars_scheduler_mem, k8s_volumes=gs_config.k8s_volumes, k8s_waiting_for_delete=gs_config.k8s_waiting_for_delete, timeout_seconds=gs_config.timeout_seconds, dangling_timeout_seconds=gs_config.dangling_timeout_seconds, with_mars=gs_config.with_mars, **kw ): """Construct a new GraphScope session. Args: config (dict or str, optional): The configuration dict or file about how to launch the GraphScope instance. For str, it will identify it as a path and read the configuration file to build a session if file exist. If not specified, the global default configuration :code:`DEFAULT_CONFIG_FILE` will be used, which get value of GS_CONFIG_PATH in environment. 
Note that it will overwrite explicit parameters. Defaults to None. addr (str, optional): The endpoint of a pre-launched GraphScope instance with '<ip>:<port>' format. A new session id will be generated for each session connection. cluster_type (str, optional): Deploy GraphScope instance on hosts or k8s cluster. Defaults to k8s. Available options: "k8s" and "hosts". Note that only support deployed on localhost with hosts mode. num_workers (int, optional): The number of workers to launch GraphScope engine. Defaults to 2. preemptive (bool, optional): If True, GraphScope instance will treat resource params (e.g. k8s_coordinator_cpu) as limits and provide the minimum available value as requests, but this will make pod has a `Burstable` QOS, which can be preempted by other pods with high QOS. Otherwise, it will set both requests and limits with the same value. k8s_namespace (str, optional): Contains the namespace to create all resource inside. If param missing, it will try to read namespace from kubernetes context, or a random namespace will be created and deleted if namespace not exist. Defaults to None. k8s_service_type (str, optional): Type determines how the GraphScope service is exposed. Valid options are NodePort, and LoadBalancer. Defaults to NodePort. k8s_gs_image (str, optional): The GraphScope engine's image. k8s_etcd_image (str, optional): The image of etcd, which used by vineyard. k8s_image_pull_policy (str, optional): Kubernetes image pull policy. Defaults to "IfNotPresent". k8s_image_pull_secrets (list[str], optional): A list of secret name used to authorize pull image. k8s_gie_graph_manager_image (str, optional): The GraphScope interactive engine's graph manager image. k8s_zookeeper_image (str, optional): The image of zookeeper, which used by GIE graph manager. k8s_vineyard_daemonset (str, optional): The name of vineyard Helm deployment to use. GraphScope will try to discovery the daemonset from kubernetes cluster, then use it if exists, and fallback to launching a bundled vineyard container otherwise. k8s_vineyard_cpu (float, optional): Minimum number of CPU cores request for vineyard container. Defaults to 0.5. k8s_vineyard_mem (str, optional): Minimum number of memory request for vineyard container. Defaults to '512Mi'. k8s_vineyard_shared_mem (str, optional): Init size of vineyard shared memory. Defaults to '4Gi'. k8s_engine_cpu (float, optional): Minimum number of CPU cores request for engine container. Defaults to 0.5. k8s_engine_mem (str, optional): Minimum number of memory request for engine container. Defaults to '4Gi'. k8s_coordinator_cpu (float, optional): Minimum number of CPU cores request for coordinator pod. Defaults to 1.0. k8s_coordinator_mem (str, optional): Minimum number of memory request for coordinator pod. Defaults to '4Gi'. k8s_etcd_num_pods (int, optional): The number of etcd pods. Defaults to 3. k8s_etcd_cpu (float, optional): Minimum number of CPU cores request for etcd pod. Defaults to 0.5. k8s_etcd_mem (str, optional): Minimum number of memory request for etcd pod. Defaults to '128Mi'. k8s_zookeeper_cpu (float, optional): Minimum number of CPU cores request for zookeeper container. Defaults to 0.5. k8s_zookeeper_mem (str, optional): Minimum number of memory request for zookeeper container. Defaults to '256Mi'. k8s_gie_graph_manager_cpu (float, optional): Minimum number of CPU cores request for graphmanager container. Defaults to 1.0. k8s_gie_graph_manager_mem (str, optional): Minimum number of memory request for graphmanager container. 
Defaults to '4Gi'. k8s_mars_worker_cpu (float, optional): Minimum number of CPU cores request for mars worker container. Defaults to 0.5. k8s_mars_worker_mem (str, optional): Minimum number of memory request for mars worker container. Defaults to '4Gi'. k8s_mars_scheduler_cpu (float, optional): Minimum number of CPU cores request for mars scheduler container. Defaults to 0.5. k8s_mars_scheduler_mem (str, optional): Minimum number of memory request for mars scheduler container. Defaults to '2Gi'. with_mars (bool, optional): Launch graphscope with mars. Defaults to False. k8s_volumes (dict, optional): A dict of k8s volume which represents a directory containing data, accessible to the containers in a pod. Defaults to {}. For example, you can mount host path with: k8s_volumes = { "my-data": { "type": "hostPath", "field": { "path": "<path>", "type": "Directory" }, "mounts": [ { "mountPath": "<path1>" }, { "mountPath": "<path2>" } ] } } Or you can mount PVC with: k8s_volumes = { "my-data": { "type": "persistentVolumeClaim", "field": { "claimName": "your-pvc-name" }, "mounts": [ { "mountPath": "<path1>" } ] } } Also, you can mount a single volume with: k8s_volumes = { "my-data": { "type": "hostPath", "field": {xxx}, "mounts": { "mountPath": "<path1>" } } } timeout_seconds (int, optional): For waiting service ready (or waiting for delete if k8s_waiting_for_delete is True). dangling_timeout_seconds (int, optional): After seconds of client disconnect, coordinator will kill this graphscope instance. Defaults to 600. Expect this value to be greater than 5 (heartbeat interval). Disable dangling check by setting -1. k8s_waiting_for_delete (bool, optional): Waiting for service delete or not. Defaults to False. **kw (dict, optional): Other optional parameters will be put to :code:`**kw`. - k8s_minikube_vm_driver: Deprecated. - k8s_client_config (dict, optional): Provide configurable parameters for connecting to remote k8s, which strongly relies on the `kube_config.new_client_from_config` function. eg: {"config_file": "~/.kube/config", "context": None, "persist_config": True} config_file: Name of the kube-config file. context: set the active context. If is set to None, current_context from config file will be used. persist_config: If True, config file will be updated when changed(e.g GCP token refresh). - log_level: Deprecated. Move this param as a global configuration. Set via `graphscope.set_option(log_level='DEBUG')` - show_log: Deprecated. Move this param as a global configuration.Set via `graphscope.set_option(show_log=True)` Raises: TypeError: If the given argument combination is invalid and cannot be used to create a GraphScope session. 
""" num_workers = int(num_workers) self._config_params = {} self._accessable_params = ( "addr", "cluster_type", "num_workers", "preemptive", "k8s_namespace", "k8s_service_type", "k8s_gs_image", "k8s_etcd_image", "k8s_image_pull_policy", "k8s_image_pull_secrets", "k8s_gie_graph_manager_image", "k8s_zookeeper_image", "k8s_coordinator_cpu", "k8s_coordinator_mem", "k8s_etcd_num_pods", "k8s_etcd_cpu", "k8s_etcd_mem", "k8s_zookeeper_cpu", "k8s_zookeeper_mem", "k8s_gie_graph_manager_cpu", "k8s_gie_graph_manager_mem", "k8s_vineyard_daemonset", "k8s_vineyard_cpu", "k8s_vineyard_mem", "k8s_vineyard_shared_mem", "k8s_engine_cpu", "k8s_engine_mem", "k8s_mars_worker_cpu", "k8s_mars_worker_mem", "k8s_mars_scheduler_cpu", "k8s_mars_scheduler_mem", "with_mars", "k8s_volumes", "k8s_waiting_for_delete", "timeout_seconds", "dangling_timeout_seconds", ) saved_locals = locals() for param in self._accessable_params: self._config_params[param] = saved_locals[param] # parse config, which should be a path to config file, or dict # config has highest priority if isinstance(config, dict): self._config_params.update(config) elif isinstance(config, str): self._load_config(config, False) elif DEFAULT_CONFIG_FILE: self._load_config(DEFAULT_CONFIG_FILE) # update other optional params self._config_params.update(kw) # initial setting of cluster_type self._cluster_type = self._parse_cluster_type() # mars cannot work with run-on-local mode if self._cluster_type == types_pb2.HOSTS and self._config_params["with_mars"]: raise NotImplementedError( "Mars cluster cannot be launched along with local GraphScope deployment" ) # deprecated params handle if "show_log" in kw: warnings.warn( "The `show_log` parameter has been deprecated and has no effect, " "please use `graphscope.set_option(show_log=%s)` instead." % kw.pop("show_log", None), category=DeprecationWarning, ) if "log_level" in kw: warnings.warn( "The `log_level` parameter has been deprecated and has no effect, " "please use `graphscope.set_option(log_level=%r)` instead." % kw.pop("show_log", None), category=DeprecationWarning, ) # update k8s_client_config params self._config_params["k8s_client_config"] = kw.pop("k8s_client_config", {}) # There should be no more custom keyword arguments. if kw: raise ValueError("Not recognized value: ", list(kw.keys())) if self._config_params["addr"]: logger.info( "Connecting graphscope session with address: %s", self._config_params["addr"], ) else: logger.info( "Initializing graphscope session with parameters: %s", self._config_params, ) self._closed = False # coordinator service endpoint self._coordinator_endpoint = None self._launcher = None self._heartbeat_sending_thread = None self._grpc_client = None self._session_id = None # unique identifier across sessions # engine config: # # { # "experiment": "ON/OFF", # "vineyard_socket": "...", # "vineyard_rpc_endpoint": "..." 
# } self._engine_config = None # interactive instance related graph map self._interactive_instance_dict = {} # learning engine related graph map self._learning_instance_dict = {} self._default_session = None atexit.register(self.close) # create and connect session with CaptureKeyboardInterrupt(self.close): self._connect() self._disconnected = False # heartbeat self._heartbeat_interval_seconds = 5 self._heartbeat_sending_thread = threading.Thread( target=self._send_heartbeat, args=() ) self._heartbeat_sending_thread.daemon = True self._heartbeat_sending_thread.start() def __repr__(self): return str(self.info) def __str__(self): return repr(self) @property def session_id(self): return self._session_id def _load_config(self, path, slient=True): config_path = os.path.expandvars(os.path.expanduser(path)) try: with open(config_path, "r") as f: data = json.load(f) self._config_params.update(data) except Exception as exp: # noqa if not slient: raise exp def _parse_cluster_type(self): if self._config_params["addr"] is not None: # get the cluster type after connecting return types_pb2.UNDEFINED else: if self._config_params["cluster_type"] == "hosts": self._run_on_local() return types_pb2.HOSTS elif self._config_params["cluster_type"] == "k8s": return types_pb2.K8S else: raise ValueError("Expect hosts or k8s of cluster_type parameter") @property def engine_config(self): """Show the engine configration associated with session in json format.""" return self._engine_config @property def info(self): """Show all resources info associated with session in json format.""" info = {} if self._closed: info["status"] = "closed" elif self._grpc_client is None or self._disconnected: info["status"] = "disconnected" else: info["status"] = "active" if self._cluster_type == types_pb2.K8S: info["type"] = "k8s" info["engine_hosts"] = ",".join(self._pod_name_list) info["namespace"] = self._config_params["k8s_namespace"] else: info["type"] = "hosts" info["engine_hosts"] = ",".join(self._config_params["hosts"]) info["cluster_type"] = str(self._cluster_type) info["session_id"] = self.session_id info["num_workers"] = self._config_params["num_workers"] info["coordinator_endpoint"] = self._coordinator_endpoint info["engine_config"] = self._engine_config return info def _send_heartbeat(self): while not self._closed: if self._grpc_client: try: self._grpc_client.send_heartbeat() except GRPCError as exc: logger.warning(exc) self._disconnected = True else: self._disconnected = False time.sleep(self._heartbeat_interval_seconds) def close(self): """Closes this session. This method frees all resources associated with the session. 
""" if self._closed: return self._closed = True self._coordinator_endpoint = None self._deregister_default() if self._heartbeat_sending_thread: self._heartbeat_sending_thread.join( timeout=self._heartbeat_interval_seconds ) self._heartbeat_sending_thread = None self._disconnected = True # close all interactive instances for instance in self._interactive_instance_dict.values(): try: if instance is not None: instance.close() except InteractiveEngineInternalError: pass self._interactive_instance_dict.clear() # close all learning instances for instance in self._learning_instance_dict.values(): try: if instance is not None: instance.close() except LearningEngineInternalError: pass self._learning_instance_dict.clear() if self._grpc_client: self._grpc_client.close() self._grpc_client = None _session_dict.pop(self._session_id, None) # clean up if self._config_params["addr"] is None: if self._launcher: self._launcher.stop() self._pod_name_list = [] def _close_interactive_instance(self, instance): """Close a interactive instance.""" if self._grpc_client: self._grpc_client.close_interactive_engine(instance.object_id) self._interactive_instance_dict[instance.object_id] = None def _close_learning_instance(self, instance): """Close a learning instance.""" if self._grpc_client: self._grpc_client.close_learning_engine(instance.object_id) self._learning_instance_dict[instance.object_id] = None def __del__(self): # cleanly ignore all exceptions try: self.close() except Exception: # pylint: disable=broad-except pass def as_default(self): """Obtain a context manager that make this object as default session. This method is used when a Session is constructed, which will immediately install self as a default session. Raises: ValueError: If default session exist in current context. Returns: A context manager using this session as the default session. """ if not _default_session_stack.is_cleared(): raise ValueError( "A default session is already active. You must explicitly call Session.close()." ) # session context manager self._default_session = default_session(self) self._default_session.__enter__() def _deregister_default(self): """Remove self from the default session stack.""" if self._default_session: self._default_session.__exit__(None, None, None) self._default_session = None def run(self, fetch): """Run operations of `fetch`. Args: fetch: :class:`Operation` Raises: RuntimeError: Client disconnect to the service. Or run on a closed session. ValueError: If fetch is not a instance of :class:`Operation`. Or the fetch has been evaluated. InvalidArgumentError: Not recognized on output type. Returns: Different values for different output types of :class:`Operation` """ # prepare names to run and fetch if hasattr(fetch, "op"): fetch = fetch.op if not isinstance(fetch, Operation): raise ValueError("Expect a `Operation`") if fetch.output is not None: raise ValueError("The op <%s> are evaluated duplicated." 
% fetch.key) # convert to list to be compatible with rpc client method signature fetch_ops = [fetch] dag = op_def_pb2.DagDef() for op in fetch_ops: dag.op.extend([copy.deepcopy(op.as_op_def())]) if self._closed: raise RuntimeError("Attempted to use a closed Session.") if not self._grpc_client: raise RuntimeError("Session disconnected.") # execute the query try: response = self._grpc_client.run(dag) except FatalError: self.close() raise check_argument( len(fetch_ops) == 1, "Cannot execute multiple ops at the same time" ) return self._parse_value(fetch_ops[0], response) def _parse_value(self, op, response: message_pb2.RunStepResponse): # attach an output to op, indicating the op is already run. op.set_output(response.metrics) # if loads a arrow property graph, will return {'object_id': xxxx} if op.output_types == types_pb2.GRAPH: return response.graph_def if op.output_types == types_pb2.APP: return response.result.decode("utf-8") if op.output_types in ( types_pb2.RESULTS, types_pb2.VINEYARD_TENSOR, types_pb2.VINEYARD_DATAFRAME, ): return response.result.decode("utf-8") if op.output_types in (types_pb2.TENSOR, types_pb2.DATAFRAME): return response.result else: raise InvalidArgumentError( "Not recognized output type: %s" % op.output_types ) def _connect(self): if self._config_params["addr"] is not None: # try connect to exist coordinator self._coordinator_endpoint = self._config_params["addr"] elif self._cluster_type == types_pb2.K8S: if ( self._config_params["k8s_etcd_image"] is None or self._config_params["k8s_gs_image"] is None ): raise K8sError("None image found.") api_client = kube_config.new_client_from_config( **self._config_params["k8s_client_config"] ) self._launcher = KubernetesClusterLauncher( api_client=api_client, namespace=self._config_params["k8s_namespace"], service_type=self._config_params["k8s_service_type"], num_workers=self._config_params["num_workers"], gs_image=self._config_params["k8s_gs_image"], preemptive=self._config_params["preemptive"], etcd_image=self._config_params["k8s_etcd_image"], gie_graph_manager_image=self._config_params[ "k8s_gie_graph_manager_image" ], zookeeper_image=self._config_params["k8s_zookeeper_image"], image_pull_policy=self._config_params["k8s_image_pull_policy"], image_pull_secrets=self._config_params["k8s_image_pull_secrets"], vineyard_daemonset=self._config_params["k8s_vineyard_daemonset"], vineyard_cpu=self._config_params["k8s_vineyard_cpu"], vineyard_mem=self._config_params["k8s_vineyard_mem"], vineyard_shared_mem=self._config_params["k8s_vineyard_shared_mem"], etcd_num_pods=self._config_params["k8s_etcd_num_pods"], etcd_cpu=self._config_params["k8s_etcd_cpu"], etcd_mem=self._config_params["k8s_etcd_mem"], zookeeper_cpu=self._config_params["k8s_zookeeper_cpu"], zookeeper_mem=self._config_params["k8s_zookeeper_mem"], gie_graph_manager_cpu=self._config_params["k8s_gie_graph_manager_cpu"], gie_graph_manager_mem=self._config_params["k8s_gie_graph_manager_mem"], engine_cpu=self._config_params["k8s_engine_cpu"], engine_mem=self._config_params["k8s_engine_mem"], mars_worker_cpu=self._config_params["k8s_mars_worker_cpu"], mars_worker_mem=self._config_params["k8s_mars_worker_mem"], mars_scheduler_cpu=self._config_params["k8s_mars_scheduler_cpu"], mars_scheduler_mem=self._config_params["k8s_mars_scheduler_mem"], with_mars=self._config_params["with_mars"], coordinator_cpu=float(self._config_params["k8s_coordinator_cpu"]), coordinator_mem=self._config_params["k8s_coordinator_mem"], volumes=self._config_params["k8s_volumes"], 
waiting_for_delete=self._config_params["k8s_waiting_for_delete"], timeout_seconds=self._config_params["timeout_seconds"], dangling_timeout_seconds=self._config_params[ "dangling_timeout_seconds" ], ) elif ( self._cluster_type == types_pb2.HOSTS and isinstance(self._config_params["hosts"], list) and len(self._config_params["hosts"]) != 0 and self._config_params["num_workers"] > 0 ): # lanuch coordinator with hosts self._launcher = HostsClusterLauncher( hosts=self._config_params["hosts"], port=self._config_params["port"], num_workers=self._config_params["num_workers"], vineyard_socket=self._config_params["vineyard_socket"], timeout_seconds=self._config_params["timeout_seconds"], ) else: raise RuntimeError("Session initialize failed.") # launching graphscope service if self._launcher is not None: self._launcher.start() self._coordinator_endpoint = self._launcher.coordinator_endpoint # waiting service ready self._grpc_client = GRPCClient(self._coordinator_endpoint) self._grpc_client.waiting_service_ready( timeout_seconds=self._config_params["timeout_seconds"], ) # connect and fetch logs from rpc server try: ( self._session_id, self._cluster_type, self._engine_config, self._pod_name_list, self._config_params["num_workers"], self._config_params["k8s_namespace"], ) = self._grpc_client.connect( cleanup_instance=not bool(self._config_params["addr"]), dangling_timeout_seconds=self._config_params[ "dangling_timeout_seconds" ], ) # fetch logs if self._config_params["addr"] or self._cluster_type == types_pb2.K8S: self._grpc_client.fetch_logs() _session_dict[self._session_id] = self except Exception: self.close() raise def get_config(self): """Get configuration of the session.""" return self._config_params def g(self, incoming_data=None, oid_type="int64", directed=True, generate_eid=True): return Graph(self, incoming_data, oid_type, directed, generate_eid) def load_from(self, *args, **kwargs): """Load a graph within the session. See more information in :meth:`graphscope.load_from`. """ with default_session(self): return graphscope.load_from(*args, **kwargs) def _run_on_local(self): self._config_params["hosts"] = ["localhost"] self._config_params["port"] = None self._config_params["vineyard_socket"] = "" def _get_gl_handle(self, graph): """Dump a handler for GraphLearn for interaction. Fields in :code:`schema` are: + the name of node type or edge type + whether the graph is weighted graph + whether the graph is labeled graph + the number of int attributes + the number of float attributes + the number of string attributes An example of the graph handle: .. code:: python { "server": "127.0.0.1:8888,127.0.0.1:8889", "client_count": 1, "vineyard_socket": "/var/run/vineyard.sock", "vineyard_id": 13278328736, "node_schema": [ "user:false:false:10:0:0", "item:true:false:0:0:5" ], "edge_schema": [ "user:click:item:true:false:0:0:0", "user:buy:item:true:true:0:0:0", "item:similar:item:false:false:10:0:0" ], "node_attribute_types": { "person": { "age": "i", "name": "s", }, }, "edge_attribute_types": { "knows": { "weight": "f", }, }, } The handle can be decoded using: .. code:: python base64.b64decode(handle.encode('ascii')).decode('ascii') Note that the ports are selected from a range :code:`(8000, 9000)`. Args: graph (:class:`Graph`): A Property Graph. client_number (int): Number of client. Returns: str: Base64 encoded handle Raises: InvalidArgumentError: If the graph is not loaded, or graph_type isn't `ARROW_PROPERTY`. 
""" if not graph.loaded(): raise InvalidArgumentError("The graph has already been unloaded") if not graph.graph_type == types_pb2.ARROW_PROPERTY: raise InvalidArgumentError("The graph should be a property graph.") def group_property_types(props): weighted, labeled, i, f, s, attr_types = "false", "false", 0, 0, 0, {} for prop in props: if prop.type in [types_pb2.STRING]: s += 1 attr_types[prop.name] = "s" elif prop.type in (types_pb2.FLOAT, types_pb2.DOUBLE): f += 1 attr_types[prop.name] = "f" else: i += 1 attr_types[prop.name] = "i" if prop.name == "weight": weighted = "true" elif prop.name == "label": labeled = "true" return weighted, labeled, i, f, s, attr_types node_schema, node_attribute_types = [], dict() for label in graph.schema.vertex_labels: weighted, labeled, i, f, s, attr_types = group_property_types( graph.schema.get_vertex_properties(label) ) node_schema.append( "{}:{}:{}:{}:{}:{}".format(label, weighted, labeled, i, f, s) ) node_attribute_types[label] = attr_types edge_schema, edge_attribute_types = [], dict() for label in graph.schema.edge_labels: weighted, labeled, i, f, s, attr_types = group_property_types( graph.schema.get_edge_properties(label) ) for rel in graph.schema.get_relationships(label): edge_schema.append( "{}:{}:{}:{}:{}:{}:{}:{}".format( rel[0], label, rel[1], weighted, labeled, i, f, s ) ) edge_attribute_types[label] = attr_types handle = { "hosts": self.info["engine_hosts"], "client_count": 1, "vineyard_id": graph.vineyard_id, "vineyard_socket": self._engine_config["vineyard_socket"], "node_schema": node_schema, "edge_schema": edge_schema, "node_attribute_types": node_attribute_types, "edge_attribute_types": edge_attribute_types, } handle_json_string = json.dumps(handle) return base64.b64encode(handle_json_string.encode("utf-8")).decode("utf-8") @set_defaults(gs_config) def gremlin(self, graph, engine_params=None): """Get a interactive engine handler to execute gremlin queries. Note that this method will be executed implicitly when a property graph created and cache a instance of InteractiveQuery in session if `initializing_interactive_engine` is True. If you want to create a new instance under the same graph by different params, you should close the instance first. .. code:: python >>> # close and recreate InteractiveQuery. >>> interactive_query = sess.gremlin(g) >>> interactive_query.close() >>> interactive_query = sess.gremlin(g, engine_params={"xxx":"xxx"}) Args: graph (:class:`Graph`): Use the graph to create interactive instance. engine_params (dict, optional): Configure startup parameters of interactive engine. You can also configure this param by `graphscope.set_option(engine_params={})`. See a list of configurable keys in `interactive_engine/deploy/docker/dockerfile/executor.vineyard.properties` Raises: InvalidArgumentError: :code:`graph` is not a property graph or unloaded. Returns: :class:`InteractiveQuery` """ # self._interactive_instance_dict[graph.vineyard_id] will be None if # InteractiveQuery closed if ( graph.vineyard_id in self._interactive_instance_dict and self._interactive_instance_dict[graph.vineyard_id] is not None ): interactive_query = self._interactive_instance_dict[graph.vineyard_id] if interactive_query.status == InteractiveQueryStatus.Running: return interactive_query elif interactive_query.status == InteractiveQueryStatus.Failed: raise InteractiveEngineInternalError(interactive_query.error_msg) else: # Initializing. # while True is ok, as the status is either running or failed eventually after timeout. 
while True: time.sleep(1) if interactive_query.status == InteractiveQueryStatus.Running: return interactive_query elif interactive_query.status == InteractiveQueryStatus.Failed: raise InteractiveEngineInternalError( interactive_query.error_msg ) if not graph.loaded(): raise InvalidArgumentError("The graph has already been unloaded") if not graph.graph_type == types_pb2.ARROW_PROPERTY: raise InvalidArgumentError("The graph should be a property graph.") interactive_query = InteractiveQuery(session=self, object_id=graph.vineyard_id) self._interactive_instance_dict[graph.vineyard_id] = interactive_query if engine_params is not None: engine_params = { str(key): str(value) for key, value in engine_params.items() } else: engine_params = {} try: response = self._grpc_client.create_interactive_engine( object_id=graph.vineyard_id, schema_path=graph.schema_path, gremlin_server_cpu=gs_config.k8s_gie_gremlin_server_cpu, gremlin_server_mem=gs_config.k8s_gie_gremlin_server_mem, engine_params=engine_params, ) except Exception as e: interactive_query.status = InteractiveQueryStatus.Failed interactive_query.error_msg = str(e) raise InteractiveEngineInternalError(str(e)) from e else: interactive_query.set_frontend( front_ip=response.frontend_host, front_port=response.frontend_port ) interactive_query.status = InteractiveQueryStatus.Running graph._attach_interactive_instance(interactive_query) return interactive_query def learning(self, graph, nodes=None, edges=None, gen_labels=None): """Start a graph learning engine. Args: nodes (list): The node types that will be used for gnn training. edges (list): The edge types that will be used for gnn training. gen_labels (list): Extra node and edge labels on original graph for gnn training. Returns: `graphscope.learning.Graph`: An instance of `graphscope.learning.Graph` that could be feed to the learning engine. """ if ( graph.vineyard_id in self._learning_instance_dict and self._learning_instance_dict[graph.vineyard_id] is not None ): return self._learning_instance_dict[graph.vineyard_id] if sys.platform != "linux" and sys.platform != "linux2": raise RuntimeError( "The learning engine currently supports Linux only, doesn't support %s" % sys.platform ) if not graph.loaded(): raise InvalidArgumentError("The graph has already been unloaded") if not graph.graph_type == types_pb2.ARROW_PROPERTY: raise InvalidArgumentError("The graph should be a property graph.") from graphscope.learning.graph import Graph as LearningGraph handle = self._get_gl_handle(graph) config = LearningGraph.preprocess_args(handle, nodes, edges, gen_labels) config = base64.b64encode(json.dumps(config).encode("utf-8")).decode("utf-8") endpoints = self._grpc_client.create_learning_engine( graph.vineyard_id, handle, config ) handle = json.loads(base64.b64decode(handle.encode("utf-8")).decode("utf-8")) handle["server"] = endpoints handle["client_count"] = 1 learning_graph = LearningGraph(handle, config, graph.vineyard_id, self) self._learning_instance_dict[graph.vineyard_id] = learning_graph graph._attach_learning_instance(learning_graph) return learning_graph session = Session def set_option(**kwargs): """Set the value of specified options. 
Available options: - num_workers - log_level - show_log - k8s_namespace - k8s_service_type - k8s_gs_image - k8s_etcd_image - k8s_gie_graph_manager_image - k8s_zookeeper_image - k8s_image_pull_policy - k8s_image_pull_secrets - k8s_coordinator_cpu - k8s_coordinator_mem - k8s_vineyard_daemonset - k8s_vineyard_cpu - k8s_vineyard_mem - k8s_vineyard_shared_mem - k8s_engine_cpu - k8s_engine_mem - k8s_mars_worker_cpu - k8s_mars_worker_mem - k8s_mars_scheduler_cpu - k8s_mars_scheduler_mem - with_mars - k8s_waiting_for_delete - engine_params - initializing_interactive_engine - timeout_seconds Args: kwargs: dict kv pair of GraphScope config you want to set. Raises: ValueError: If no such option exists. Returns: None """ # check exists for k, v in kwargs.items(): if not hasattr(gs_config, k): raise ValueError("No such option {} exists.".format(k)) for k, v in kwargs.items(): setattr(gs_config, k, v) GSLogger.update() def get_option(key): """Get the value of specified option. Available options: - num_workers - log_level - show_log - k8s_namespace - k8s_service_type - k8s_gs_image - k8s_etcd_image - k8s_gie_graph_manager_image - k8s_zookeeper_image - k8s_image_pull_policy - k8s_image_pull_secrets - k8s_coordinator_cpu - k8s_coordinator_mem - k8s_vineyard_daemonset - k8s_vineyard_cpu - k8s_vineyard_mem - k8s_vineyard_shared_mem - k8s_engine_cpu - k8s_engine_mem - k8s_mars_worker_cpu - k8s_mars_worker_mem - k8s_mars_scheduler_cpu - k8s_mars_scheduler_mem - with_mars - k8s_waiting_for_delete - engine_params - initializing_interactive_engine - timeout_seconds Args: key: str Key of GraphScope config you want to get. Raises: ValueError: If no such option exists. Returns: result: the value of the option """ if hasattr(gs_config, key): return getattr(gs_config, key) else: raise ValueError("No such option {} exists.".format(key)) def default_session(session): """Python's :code:`with` handler for defining a default session. This function provides a means of registering a session for handling and code that need a default session calls. The :code:`with` keyword to specify that code invocations within the scope of a block should be executed by a particular session. Args: session: :class:`Session` The session to be installed as the default session. Returns: A context manager for the default session. """ return _default_session_stack.get_controller(session) def get_default_session(): """Returns the default session for the current context. Raises: RuntimeError: Default session is not exist. Returns: The default :class:`Session`. 
""" return _default_session_stack.get_default() def get_session_by_id(handle): """Return the session by handle.""" if handle not in _session_dict: raise ValueError("Session not exists.") return _session_dict.get(handle) class _DefaultSessionStack(object): """A stack of objects for providing implicit defaults.""" def __init__(self): super().__init__() self.stack = [] def get_default(self): if not self.stack: raise RuntimeError("No default session found.") return self.stack[-1] def reset(self): self.stack = [] def is_cleared(self): return not self.stack @contextlib.contextmanager def get_controller(self, default): """A context manager for manipulating a default stack.""" self.stack.append(default) try: yield default finally: # stack may be empty if reset() was called if self.stack: self.stack.remove(default) _default_session_stack = _DefaultSessionStack() # pylint: disable=protected-access def g(incoming_data=None, oid_type="int64", directed=True, generate_eid=True): return get_default_session().g(incoming_data, oid_type, directed, generate_eid)
[ "logging.getLogger", "graphscope.client.rpc.GRPCClient", "time.sleep", "graphscope.interactive.query.InteractiveQuery", "graphscope.proto.op_def_pb2.DagDef", "graphscope.framework.errors.InvalidArgumentError", "json.dumps", "atexit.register", "os.path.expanduser", "graphscope.deploy.hosts.cluster.HostsClusterLauncher", "graphscope.client.utils.CaptureKeyboardInterrupt", "graphscope.framework.errors.K8sError", "graphscope.client.utils.set_defaults", "graphscope.load_from", "graphscope.client.utils.GSLogger.update", "graphscope.framework.errors.InteractiveEngineInternalError", "graphscope.learning.graph.Graph.preprocess_args", "kubernetes.config.new_client_from_config", "graphscope.framework.graph.Graph", "graphscope.learning.graph.Graph", "json.load", "threading.Thread" ]
[((2378, 2409), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (2395, 2409), False, 'import logging\n'), ((2297, 2345), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.graphscope/session.json"""'], {}), "('~/.graphscope/session.json')\n", (2315, 2345), False, 'import os\n'), ((4779, 4802), 'graphscope.client.utils.set_defaults', 'set_defaults', (['gs_config'], {}), '(gs_config)\n', (4791, 4802), False, 'from graphscope.client.utils import set_defaults\n'), ((39313, 39336), 'graphscope.client.utils.set_defaults', 'set_defaults', (['gs_config'], {}), '(gs_config)\n', (39325, 39336), False, 'from graphscope.client.utils import set_defaults\n'), ((46774, 46791), 'graphscope.client.utils.GSLogger.update', 'GSLogger.update', ([], {}), '()\n', (46789, 46791), False, 'from graphscope.client.utils import GSLogger\n'), ((19998, 20025), 'atexit.register', 'atexit.register', (['self.close'], {}), '(self.close)\n', (20013, 20025), False, 'import atexit\n'), ((20285, 20339), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._send_heartbeat', 'args': '()'}), '(target=self._send_heartbeat, args=())\n', (20301, 20339), False, 'import threading\n'), ((27310, 27329), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (27327, 27329), False, 'from graphscope.proto import op_def_pb2\n'), ((33098, 33136), 'graphscope.client.rpc.GRPCClient', 'GRPCClient', (['self._coordinator_endpoint'], {}), '(self._coordinator_endpoint)\n', (33108, 33136), False, 'from graphscope.client.rpc import GRPCClient\n'), ((34338, 34398), 'graphscope.framework.graph.Graph', 'Graph', (['self', 'incoming_data', 'oid_type', 'directed', 'generate_eid'], {}), '(self, incoming_data, oid_type, directed, generate_eid)\n', (34343, 34398), False, 'from graphscope.framework.graph import Graph\n'), ((39204, 39222), 'json.dumps', 'json.dumps', (['handle'], {}), '(handle)\n', (39214, 39222), False, 'import json\n'), ((42138, 42197), 'graphscope.interactive.query.InteractiveQuery', 'InteractiveQuery', ([], {'session': 'self', 'object_id': 'graph.vineyard_id'}), '(session=self, object_id=graph.vineyard_id)\n', (42154, 42197), False, 'from graphscope.interactive.query import InteractiveQuery\n'), ((44793, 44856), 'graphscope.learning.graph.Graph.preprocess_args', 'LearningGraph.preprocess_args', (['handle', 'nodes', 'edges', 'gen_labels'], {}), '(handle, nodes, edges, gen_labels)\n', (44822, 44856), True, 'from graphscope.learning.graph import Graph as LearningGraph\n'), ((45246, 45300), 'graphscope.learning.graph.Graph', 'LearningGraph', (['handle', 'config', 'graph.vineyard_id', 'self'], {}), '(handle, config, graph.vineyard_id, self)\n', (45259, 45300), True, 'from graphscope.learning.graph import Graph as LearningGraph\n'), ((20076, 20112), 'graphscope.client.utils.CaptureKeyboardInterrupt', 'CaptureKeyboardInterrupt', (['self.close'], {}), '(self.close)\n', (20100, 20112), False, 'from graphscope.client.utils import CaptureKeyboardInterrupt\n'), ((20729, 20753), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (20747, 20753), False, 'import os\n'), ((23064, 23108), 'time.sleep', 'time.sleep', (['self._heartbeat_interval_seconds'], {}), '(self._heartbeat_interval_seconds)\n', (23074, 23108), False, 'import time\n'), ((28718, 28790), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('Not recognized output type: %s' % op.output_types)"], {}), "('Not recognized output type: %s' % op.output_types)\n", (28738, 28790), 
False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((34615, 34652), 'graphscope.load_from', 'graphscope.load_from', (['*args'], {}), '(*args, **kwargs)\n', (34635, 34652), False, 'import graphscope\n'), ((36812, 36871), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph has already been unloaded"""'], {}), "('The graph has already been unloaded')\n", (36832, 36871), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((36951, 37012), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph should be a property graph."""'], {}), "('The graph should be a property graph.')\n", (36971, 37012), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((41908, 41967), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph has already been unloaded"""'], {}), "('The graph has already been unloaded')\n", (41928, 41967), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((42047, 42108), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph should be a property graph."""'], {}), "('The graph should be a property graph.')\n", (42067, 42108), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((44460, 44519), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph has already been unloaded"""'], {}), "('The graph has already been unloaded')\n", (44480, 44519), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((44599, 44660), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""The graph should be a property graph."""'], {}), "('The graph should be a property graph.')\n", (44619, 44660), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((20837, 20849), 'json.load', 'json.load', (['f'], {}), '(f)\n', (20846, 20849), False, 'import json\n'), ((29298, 29376), 'kubernetes.config.new_client_from_config', 'kube_config.new_client_from_config', ([], {}), "(**self._config_params['k8s_client_config'])\n", (29332, 29376), True, 'from kubernetes import config as kube_config\n'), ((29243, 29272), 'graphscope.framework.errors.K8sError', 'K8sError', (['"""None image found."""'], {}), "('None image found.')\n", (29251, 29272), False, 'from graphscope.framework.errors import K8sError\n'), ((32425, 32689), 'graphscope.deploy.hosts.cluster.HostsClusterLauncher', 'HostsClusterLauncher', ([], {'hosts': "self._config_params['hosts']", 'port': "self._config_params['port']", 'num_workers': "self._config_params['num_workers']", 'vineyard_socket': "self._config_params['vineyard_socket']", 'timeout_seconds': "self._config_params['timeout_seconds']"}), "(hosts=self._config_params['hosts'], port=self.\n _config_params['port'], num_workers=self._config_params['num_workers'],\n vineyard_socket=self._config_params['vineyard_socket'], timeout_seconds\n =self._config_params['timeout_seconds'])\n", (32445, 32689), False, 'from graphscope.deploy.hosts.cluster import HostsClusterLauncher\n'), ((41222, 41281), 'graphscope.framework.errors.InteractiveEngineInternalError', 'InteractiveEngineInternalError', (['interactive_query.error_msg'], {}), '(interactive_query.error_msg)\n', (41252, 41281), False, 'from graphscope.framework.errors import InteractiveEngineInternalError\n'), ((41484, 41497), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41494, 41497), False, 
'import time\n'), ((44891, 44909), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (44901, 44909), False, 'import json\n'), ((41744, 41803), 'graphscope.framework.errors.InteractiveEngineInternalError', 'InteractiveEngineInternalError', (['interactive_query.error_msg'], {}), '(interactive_query.error_msg)\n', (41774, 41803), False, 'from graphscope.framework.errors import InteractiveEngineInternalError\n')]
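The integer pairs in these entries look like character offsets into the same row's code string (for instance, the logging.getLogger entry above records the span of the logging.getLogger("graphscope") call near the top of the file). A minimal consistency check, assuming the code field holds the exact source text so the offsets line up, might look like the sketch below; check_row is an illustrative helper name, not part of the dataset or of graphscope.

# Consistency check between a row's `code` string and its `extract_api`
# entries. Assumes `code` is the exact source text and that the first element
# of each entry is a (start, end) character span covering the call as written.
def check_row(code, extract_api):
    for entry in extract_api:
        (start, end), full_api, call_text = entry[0], entry[1], entry[2]
        snippet = code[start:end]
        status = "ok" if snippet.startswith(call_text) else "MISMATCH"
        print(f"{full_api:50s} {status:8s} {snippet!r}")

# Self-contained demonstration with a made-up two-line "row"; the offsets are
# computed on the fly so the example stays correct by construction.
demo_code = 'import logging\nlogger = logging.getLogger("demo")\n'
start = demo_code.index('logging.getLogger(')
end = demo_code.index(')', start) + 1
demo_entries = [((start, end), 'logging.getLogger', 'logging.getLogger',
                 (['"demo"'], {}), "('demo')\n",
                 (demo_code.index('(', start), end), False,
                 'import logging\n')]
check_row(demo_code, demo_entries)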
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX, # # https://github.com/networkx/networkx/blob/master/networkx/readwrite/adjlist.py # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # from numbers import Number # fmt: off from networkx.drawing.nx_pylab import draw as _draw from networkx.drawing.nx_pylab import draw_networkx as _draw_networkx from networkx.drawing.nx_pylab import \ draw_networkx_edge_labels as _draw_networkx_edge_labels from networkx.drawing.nx_pylab import draw_networkx_edges as _draw_networkx_edges from networkx.drawing.nx_pylab import draw_networkx_labels as _draw_networkx_labels from networkx.drawing.nx_pylab import draw_networkx_nodes as _draw_networkx_nodes from graphscope import nx from graphscope.nx.drawing.layout import circular_layout from graphscope.nx.drawing.layout import kamada_kawai_layout from graphscope.nx.drawing.layout import planar_layout from graphscope.nx.drawing.layout import random_layout from graphscope.nx.drawing.layout import shell_layout from graphscope.nx.drawing.layout import spectral_layout from graphscope.nx.drawing.layout import spring_layout from graphscope.nx.utils.compat import with_graphscope_nx_context # fmt: on __all__ = [ "draw", "draw_networkx", "draw_networkx_nodes", "draw_networkx_edges", "draw_networkx_labels", "draw_networkx_edge_labels", "draw_circular", "draw_kamada_kawai", "draw_random", "draw_spectral", "draw_spring", "draw_planar", "draw_shell", ] def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): """Apply an alpha (or list of alphas) to the colors provided. Parameters ---------- colors : color string, or array of floats Color of element. Can be a single color format string (default='r'), or a sequence of colors with the same length as nodelist. If numeric values are specified they will be mapped to colors using the cmap and vmin,vmax parameters. See matplotlib.scatter for more details. alpha : float or array of floats Alpha values for elements. This can be a single alpha value, in which case it will be applied to all the elements of color. Otherwise, if it is an array, the elements of alpha will be applied to the colors in order (cycling through alpha multiple times if necessary). elem_list : array of networkx objects The list of elements which are being colored. These could be nodes, edges or labels. cmap : matplotlib colormap Color map for use if colors is a list of floats corresponding to points on a color mapping. vmin, vmax : float Minimum and maximum values for normalizing colors if a color mapping is used. Returns ------- rgba_colors : numpy ndarray Array containing RGBA format values for each of the node colours. """ from itertools import cycle from itertools import islice try: import matplotlib.cm as cm import numpy as np from matplotlib.colors import colorConverter except ImportError as e: raise ImportError("Matplotlib required for draw()") from e # If we have been provided with a list of numbers as long as elem_list, # apply the color mapping. if len(colors) == len(elem_list) and isinstance(colors[0], Number): mapper = cm.ScalarMappable(cmap=cmap) mapper.set_clim(vmin, vmax) rgba_colors = mapper.to_rgba(colors) # Otherwise, convert colors to matplotlib's RGB using the colorConverter # object. 
These are converted to numpy ndarrays to be consistent with the # to_rgba method of ScalarMappable. else: try: rgba_colors = np.array([colorConverter.to_rgba(colors)]) except ValueError: rgba_colors = np.array([colorConverter.to_rgba(color) for color in colors]) # Set the final column of the rgba_colors to have the relevant alpha values try: # If alpha is longer than the number of colors, resize to the number of # elements. Also, if rgba_colors.size (the number of elements of # rgba_colors) is the same as the number of elements, resize the array, # to avoid it being interpreted as a colormap by scatter() if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list): rgba_colors = np.resize(rgba_colors, (len(elem_list), 4)) rgba_colors[1:, 0] = rgba_colors[0, 0] rgba_colors[1:, 1] = rgba_colors[0, 1] rgba_colors[1:, 2] = rgba_colors[0, 2] rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) except TypeError: rgba_colors[:, -1] = alpha return rgba_colors @with_graphscope_nx_context(_draw_networkx_nodes) def draw_networkx_nodes( G, pos, nodelist=None, node_size=300, node_color="#1f78b4", node_shape="o", alpha=None, cmap=None, vmin=None, vmax=None, ax=None, linewidths=None, edgecolors=None, label=None, ): pass @with_graphscope_nx_context(_draw_networkx_edges) def draw_networkx_edges( G, pos, edgelist=None, width=1.0, edge_color="k", style="solid", alpha=None, arrowstyle="-|>", arrowsize=10, edge_cmap=None, edge_vmin=None, edge_vmax=None, ax=None, arrows=True, label=None, node_size=300, nodelist=None, node_shape="o", connectionstyle=None, min_source_margin=0, min_target_margin=0, ): pass @with_graphscope_nx_context(_draw_networkx_labels) def draw_networkx_labels( G, pos, labels=None, font_size=12, font_color="k", font_family="sans-serif", font_weight="normal", alpha=None, bbox=None, horizontalalignment="center", verticalalignment="center", ax=None, ): pass @with_graphscope_nx_context(_draw) def draw(G, pos=None, ax=None, **kwds): pass @with_graphscope_nx_context(_draw_networkx) def draw_networkx(G, pos=None, arrows=True, with_labels=True, **kwds): pass @with_graphscope_nx_context(_draw_networkx_edge_labels) def draw_networkx_edge_labels( G, pos, edge_labels=None, label_pos=0.5, font_size=10, font_color="k", font_family="sans-serif", font_weight="normal", alpha=None, bbox=None, horizontalalignment="center", verticalalignment="center", ax=None, rotate=True, ): pass def draw_circular(G, **kwargs): draw(G, circular_layout(G), **kwargs) def draw_kamada_kawai(G, **kwargs): draw(G, kamada_kawai_layout(G), **kwargs) def draw_random(G, **kwargs): draw(G, random_layout(G), **kwargs) def draw_spectral(G, **kwargs): draw(G, spectral_layout(G), **kwargs) def draw_spring(G, **kwargs): draw(G, spring_layout(G), **kwargs) def draw_shell(G, **kwargs): nlist = kwargs.get("nlist", None) if nlist is not None: del kwargs["nlist"] draw(G, shell_layout(G, nlist=nlist), **kwargs) def draw_planar(G, **kwargs): draw(G, planar_layout(G), **kwargs)
[ "itertools.cycle", "graphscope.nx.drawing.layout.circular_layout", "graphscope.nx.drawing.layout.spectral_layout", "graphscope.nx.drawing.layout.kamada_kawai_layout", "graphscope.nx.drawing.layout.spring_layout", "graphscope.nx.drawing.layout.planar_layout", "matplotlib.colors.colorConverter.to_rgba", "graphscope.nx.utils.compat.with_graphscope_nx_context", "matplotlib.cm.ScalarMappable", "graphscope.nx.drawing.layout.random_layout", "graphscope.nx.drawing.layout.shell_layout" ]
[((5044, 5092), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_nodes'], {}), '(_draw_networkx_nodes)\n', (5070, 5092), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((5365, 5413), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_edges'], {}), '(_draw_networkx_edges)\n', (5391, 5413), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((5839, 5888), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_labels'], {}), '(_draw_networkx_labels)\n', (5865, 5888), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((6167, 6200), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw'], {}), '(_draw)\n', (6193, 6200), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((6253, 6295), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx'], {}), '(_draw_networkx)\n', (6279, 6295), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((6379, 6433), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_draw_networkx_edge_labels'], {}), '(_draw_networkx_edge_labels)\n', (6405, 6433), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((3682, 3710), 'matplotlib.cm.ScalarMappable', 'cm.ScalarMappable', ([], {'cmap': 'cmap'}), '(cmap=cmap)\n', (3699, 3710), True, 'import matplotlib.cm as cm\n'), ((6801, 6819), 'graphscope.nx.drawing.layout.circular_layout', 'circular_layout', (['G'], {}), '(G)\n', (6816, 6819), False, 'from graphscope.nx.drawing.layout import circular_layout\n'), ((6881, 6903), 'graphscope.nx.drawing.layout.kamada_kawai_layout', 'kamada_kawai_layout', (['G'], {}), '(G)\n', (6900, 6903), False, 'from graphscope.nx.drawing.layout import kamada_kawai_layout\n'), ((6959, 6975), 'graphscope.nx.drawing.layout.random_layout', 'random_layout', (['G'], {}), '(G)\n', (6972, 6975), False, 'from graphscope.nx.drawing.layout import random_layout\n'), ((7033, 7051), 'graphscope.nx.drawing.layout.spectral_layout', 'spectral_layout', (['G'], {}), '(G)\n', (7048, 7051), False, 'from graphscope.nx.drawing.layout import spectral_layout\n'), ((7107, 7123), 'graphscope.nx.drawing.layout.spring_layout', 'spring_layout', (['G'], {}), '(G)\n', (7120, 7123), False, 'from graphscope.nx.drawing.layout import spring_layout\n'), ((7270, 7298), 'graphscope.nx.drawing.layout.shell_layout', 'shell_layout', (['G'], {'nlist': 'nlist'}), '(G, nlist=nlist)\n', (7282, 7298), False, 'from graphscope.nx.drawing.layout import shell_layout\n'), ((7354, 7370), 'graphscope.nx.drawing.layout.planar_layout', 'planar_layout', (['G'], {}), '(G)\n', (7367, 7370), False, 'from graphscope.nx.drawing.layout import planar_layout\n'), ((4928, 4940), 'itertools.cycle', 'cycle', (['alpha'], {}), '(alpha)\n', (4933, 4940), False, 'from itertools import cycle\n'), ((4047, 4077), 'matplotlib.colors.colorConverter.to_rgba', 'colorConverter.to_rgba', (['colors'], {}), '(colors)\n', (4069, 4077), False, 'from matplotlib.colors import colorConverter\n'), ((4143, 4172), 'matplotlib.colors.colorConverter.to_rgba', 'colorConverter.to_rgba', (['color'], {}), '(color)\n', (4165, 4172), False, 'from matplotlib.colors import colorConverter\n')]
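Each entry also carries, as its last element, the import statement that brings the callee into scope (for this drawing row: 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n', 'import matplotlib.cm as cm\n', and so on), and those lines appear verbatim in the code above. A small sanity check of that relationship, again assuming the last field is always the import line; missing_imports is an illustrative helper name of this sketch.

# Report extract_api entries whose recorded import statement does not occur
# verbatim in the row's `code` string. Assumes the last element of every
# entry is the import line (as in the rows above).
def missing_imports(code, extract_api):
    missing = []
    for entry in extract_api:
        import_stmt = entry[-1].strip()
        if import_stmt and import_stmt not in code:
            missing.append((entry[1], import_stmt))
    return missing

# For a well-formed row this prints an empty list.
print(missing_imports("import logging\n",
                      [((0, 0), 'logging.getLogger', 'logging.getLogger',
                        ([], {}), '', (0, 0), False, 'import logging\n')]))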
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is referred and derived from project NetworkX
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
import networkx.readwrite.tests.test_gml
import pytest
from networkx.readwrite.tests.test_gml import TestGraph

from graphscope import nx
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(
    networkx.readwrite.tests.test_gml,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestGraph)
class TestGraph:
    def test_tuplelabels(self):
        # https://github.com/networkx/networkx/pull/1048
        # Writing tuple labels to GML failed.
        G = nx.Graph()
        G.add_edge((0, 1), (1, 0))
        data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
        answer = """graph [
  node [
    id 0
    label "(0,1)"
  ]
  node [
    id 1
    label "(1,0)"
  ]
  edge [
    source 0
    target 1
  ]
]"""
        assert data == answer

    def test_data_types(self):
        # NB: json can't use tuple, byte as key
        data = [
            True,
            False,
            10**10,  # 10 ** 20 overflow on folly::dynamic
            -2e33,
            "'",
            '"&&amp;&&#34;"',
            [{"\xfd": "\x7f", chr(0x4444): [1, 2]}, [2, "3"]],
        ]
        try:  # fails under IronPython
            data.append(chr(0x14444))
        except ValueError:
            data.append(chr(0x1444))
        G = nx.Graph()
        G.name = data
        G.graph["data"] = data
        print(dict(data=data))
        G.add_node(0, int=-1, data=dict(data=data))
        G.add_edge(0, 0, float=-2.5, data=data)
        gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
        G = nx.parse_gml(gml, destringizer=literal_destringizer)
        assert data == G.name
        assert {"name": data, "data": data} == G.graph
        assert list(G.nodes(data=True)) == [(0, dict(int=-1, data=dict(data=data)))]
        assert list(G.edges(data=True)) == [(0, 0, dict(float=-2.5, data=data))]
        G = nx.Graph()
        G.graph["data"] = "frozenset([1, 2, 3])"
        G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
        assert G.graph["data"] == "frozenset([1, 2, 3])"

    def test_tuplelabels(self):
        # https://github.com/networkx/networkx/pull/1048
        # Writing tuple labels to GML failed.
        G = nx.Graph()
        G.add_edge((0, 1), (1, 0))
        data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
        answer = (
            """graph [
  node [
    id 0
    label "(0,1)"
  ]
  node [
    id 1
    label "(1,0)"
  ]
  edge [
    source 0
    target 1
  ]
]""",
            """graph [
  node [
    id 0
    label "(1,0)"
  ]
  node [
    id 1
    label "(0,1)"
  ]
  edge [
    source 0
    target 1
  ]
]""",
        )
        assert data in answer

    @pytest.mark.skip(
        reason="the folly json serialization does not support to keep the decimal point in SHORTEST mode, keep record on issue #1167"
    )
    def test_float_label(self):
        node = 1.0
        G = nx.Graph()
        G.add_node(node)
        fobj = tempfile.NamedTemporaryFile()
        nx.write_gml(G, fobj)
        fobj.seek(0)
        # Should be bytes in 2.x and 3.x
        data = fobj.read().strip().decode("ascii")
        answer = """graph [
  node [
    id 0
    label "1"
  ]
]"""
        assert data == answer

    @pytest.mark.skip(reason="rapidjson not support inf.")
    def test_special_float_label(self):
        pass
[ "pytest.mark.skip", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures", "graphscope.nx.write_gml", "graphscope.nx.generate_gml", "graphscope.nx.parse_gml", "graphscope.nx.Graph" ]
[((799, 844), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (822, 844), False, 'import pytest\n'), ((846, 883), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGraph'], {}), '(TestGraph)\n', (872, 883), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((3255, 3408), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""the folly json serialization does not support to keep the decimal point in SHORTEST mode, keep record on issue #1167"""'}), "(reason=\n 'the folly json serialization does not support to keep the decimal point in SHORTEST mode, keep record on issue #1167'\n )\n", (3271, 3408), False, 'import pytest\n'), ((3805, 3858), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""rapidjson not support inf."""'}), "(reason='rapidjson not support inf.')\n", (3821, 3858), False, 'import pytest\n'), ((747, 792), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (770, 792), False, 'import pytest\n'), ((1048, 1058), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1056, 1058), False, 'from graphscope import nx\n'), ((1837, 1847), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1845, 1847), False, 'from graphscope import nx\n'), ((2119, 2171), 'graphscope.nx.parse_gml', 'nx.parse_gml', (['gml'], {'destringizer': 'literal_destringizer'}), '(gml, destringizer=literal_destringizer)\n', (2131, 2171), False, 'from graphscope import nx\n'), ((2435, 2445), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (2443, 2445), False, 'from graphscope import nx\n'), ((2772, 2782), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (2780, 2782), False, 'from graphscope import nx\n'), ((3476, 3486), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (3484, 3486), False, 'from graphscope import nx\n'), ((3565, 3586), 'graphscope.nx.write_gml', 'nx.write_gml', (['G', 'fobj'], {}), '(G, fobj)\n', (3577, 3586), False, 'from graphscope import nx\n'), ((1119, 1168), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {'stringizer': 'literal_stringizer'}), '(G, stringizer=literal_stringizer)\n', (1134, 1168), False, 'from graphscope import nx\n'), ((2056, 2105), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {'stringizer': 'literal_stringizer'}), '(G, stringizer=literal_stringizer)\n', (2071, 2105), False, 'from graphscope import nx\n'), ((2520, 2538), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {}), '(G)\n', (2535, 2538), False, 'from graphscope import nx\n'), ((2843, 2892), 'graphscope.nx.generate_gml', 'nx.generate_gml', (['G'], {'stringizer': 'literal_stringizer'}), '(G, stringizer=literal_stringizer)\n', (2858, 2892), False, 'from graphscope import nx\n')]
import networkx.generators.tests.test_geometric
import pytest
from networkx.generators.tests.test_geometric import TestNavigableSmallWorldGraph

from graphscope.framework.errors import UnimplementedError
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(
    networkx.generators.tests.test_geometric,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestNavigableSmallWorldGraph)
class TestNavigableSmallWorldGraph:
    def test_navigable_small_world(self):
        with pytest.raises(UnimplementedError):
            G = nx.navigable_small_world_graph(5, p=1, q=0, seed=42)
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.raises", "pytest.mark.usefixtures" ]
[((472, 517), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (495, 517), False, 'import pytest\n'), ((519, 575), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestNavigableSmallWorldGraph'], {}), '(TestNavigableSmallWorldGraph)\n', (545, 575), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((420, 465), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (443, 465), False, 'import pytest\n'), ((667, 700), 'pytest.raises', 'pytest.raises', (['UnimplementedError'], {}), '(UnimplementedError)\n', (680, 700), False, 'import pytest\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import pytest from networkx.exception import NetworkXError import graphscope import graphscope.nx as nx from graphscope.client.session import g from graphscope.client.session import get_default_session from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.loader import Loader from graphscope.proto import graph_def_pb2 def ldbc_sample_single_label(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_string_oid(prefix, directed): graph = graphscope.g(directed=directed, oid_type="string", generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_single_label_with_sess(sess, prefix, directed): graph = sess.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_multi_labels(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = ( graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment", ["creationDate", "locationIP", "browserUsed", "content", "length"], ) .add_vertices( Loader(os.path.join(prefix, "person_0_0.csv"), delimiter="|"), "person", [ "firstName", "lastName", "gender", ("birthday", str), "creationDate", "locationIP", "browserUsed", ], ) .add_vertices( Loader(os.path.join(prefix, "post_0_0.csv"), delimiter="|"), "post", [ "imageFile", "creationDate", "locationIP", "browserUsed", "language", "content", "length", ], ) ) graph = ( graph.add_edges( Loader( os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|" ), "replyOf", src_label="comment", dst_label="comment", ) .add_edges( Loader(os.path.join(prefix, "person_knows_person_0_0.csv"), delimiter="|"), "knows", ["creationDate"], src_label="person", dst_label="person", ) .add_edges( Loader(os.path.join(prefix, "comment_replyOf_post_0_0.csv"), delimiter="|"), "replyOf2", src_label="comment", dst_label="post", ) ) return graph def load_p2p(prefix, directed): graph = graphscope.load_from( edges={ "group": { "loader": Loader( os.path.join(prefix, "p2p-31.e"), header_row=False, delimiter=" " ) } }, directed=directed, generate_eid=False, ) return graph @pytest.mark.usefixtures("graphscope_session") class TestGraphTransformation(object): @classmethod def 
setup_class(cls): cls.NXGraph = nx.Graph cls.data_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample") cls.single_label_g = ldbc_sample_single_label(cls.data_dir, False) cls.multi_label_g = ldbc_sample_multi_labels(cls.data_dir, False) cls.p2p = load_p2p(os.path.expandvars("${GS_TEST_DIR}"), False) cls.p2p_nx = nx.read_edgelist( os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"), nodetype=int, data=True, ) cls.str_oid_g = ldbc_sample_string_oid(cls.data_dir, False) @classmethod def teardown_class(cls): cls.single_label_g.unload() cls.multi_label_g.unload() cls.str_oid_g.unload() def assert_convert_success(self, gs_g, nx_g): assert gs_g.is_directed() == nx_g.is_directed() assert self._schema_equal(gs_g.schema, nx_g.schema) def _schema_equal(self, gs_schema, nx_schema): v_props = {} for entry in gs_schema._valid_vertex_labels(): for prop in entry.properties: v_props[prop.name] = prop.type e_props = {} for entry in gs_schema._valid_edge_labels(): for prop in entry.properties: e_props[prop.name] = prop.type gs_v_props = { prop.name: prop.type for prop in list(nx_schema._valid_vertex_labels())[0].properties } gs_e_props = { prop.name: prop.type for prop in list(nx_schema._valid_edge_labels())[0].properties } return v_props == gs_v_props and e_props == gs_e_props # nx to gs def test_empty_nx_to_gs(self): empty_nx_g = self.NXGraph(dist=True) gs_g = g(empty_nx_g) self.assert_convert_success(gs_g, empty_nx_g) def test_only_contains_nodes_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(100), type="node") gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_simple_nx_to_gs(self): nx_g = nx.complete_graph(10, create_using=self.NXGraph) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_int_node_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(10), foo="star") nx_g.add_edges_from( [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8), (8, 9)], weight=3.14, ) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_str_node_nx_to_gs(self): nx_g = nx.les_miserables_graph() gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_complete_nx_to_gs(self): # multi-propery, node propery and edge propty both aligned nodes = [ (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}), ] edges = [ (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), ] nx_g = self.NXGraph(dist=True) nx_g.update(edges, nodes) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # node property aliged, edge not aliged nx_g2 = nx_g.copy() nx_g2.add_edge(0, 1, ep4="new propery") gs_g2 = g(nx_g2) self.assert_convert_success(gs_g2, nx_g2) # edge property aliged, node not aliged nx_g3 = nx_g.copy() nx_g3.add_node(2, vp4="new propery") gs_g3 = g(nx_g3) self.assert_convert_success(gs_g3, nx_g3) # both not aliged nx_g4 = nx_g.copy() nx_g4.add_edge(0, 1, ep4="new propery") nx_g4.add_node(2, vp4="new propery") gs_g4 = g(nx_g4) self.assert_convert_success(gs_g4, nx_g4) def test_nx_to_gs_after_modify(self): nx_g = self.NXGraph(dist=True) nodes = [ (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}), ] # add nodes nx_g.add_nodes_from(nodes) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # add_edges edges = [ (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (1, 
2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), ] nx_g.add_edges_from(edges) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # remove edge nx_g.remove_edge(0, 1) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # remove node nx_g.remove_node(0) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # clear nx_g.clear() gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_nx_to_gs_remove_nodes(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(10)) # all nodes are int gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # success nx_g.add_node("str_node") # add a str node with pytest.raises( RuntimeError, match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph", ): gs_g = g(nx_g) # mixing oid type, failed nx_g.remove_node("str_node") # remove str node, all nodes are int again gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # success def test_error_on_view_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g._graph = None # graph view always has a _graph attribute nx_g._is_client_view = False with pytest.raises(TypeError, match="graph view can not convert to gs graph"): gs_g = g(nx_g) def test_error_on_mixing_node_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_node(0, weight=1.23) nx_g.add_node("zakky", foo="node") with pytest.raises( RuntimeError, match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph", ): gs_g = g(nx_g) # gs to nx def test_empty_gs_to_nx(self): empty_nx = self.NXGraph(dist=True) empty_gs_graph = g(empty_nx) G = self.NXGraph(empty_gs_graph) self.assert_convert_success(empty_gs_graph, G) def test_single_label_gs_to_nx(self): G = self.NXGraph(self.single_label_g) assert G.number_of_nodes() == 76830 assert G.number_of_edges() == 38786 assert 618475290625 not in G assert ("comment", 618475290625) in G G2 = self.NXGraph(self.single_label_g, default_label="comment") assert G2.number_of_nodes() == 76830 assert G2.number_of_edges() == 38786 assert 618475290625 in G2 assert ("comment", 618475290625) not in G2 def test_multi_label_gs_to_nx(self): G = self.NXGraph(self.multi_label_g) assert G.number_of_nodes() == (76830 + 903 + 78976) assert G.number_of_edges() == (38786 + 6626 + 38044) assert 618475290625 not in G # comment node is (label, id) format assert ("comment", 618475290625) in G assert 933 not in G # person node is (label, id) format assert ("person", 933) in G assert 618475290624 not in G # post node is (label, id) format assert ("post", 618475290624) in G G2 = self.NXGraph(self.multi_label_g, default_label="comment") assert G2.number_of_nodes() == (76830 + 903 + 78976) assert G2.number_of_edges() == (38786 + 6626 + 38044) assert 618475290625 in G2 # comment node is default label node assert ("comment", 618475290625) not in G2 assert 933 not in G2 # person node is (label, id) format assert ("person", 933) in G2 assert 618475290624 not in G2 # post node is (label, id) format assert ("post", 618475290624) in G @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) == "standalone", reason="FIXME(weibin): ci runner failed", ) def test_report_methods_on_copy_on_write_strategy(self): G = self.NXGraph(self.multi_label_g, default_label="person") assert G.graph_type == graph_def_pb2.ARROW_PROPERTY # test NODE_NUM and EDGE_NUM assert G.number_of_nodes() == (76830 + 903 + 78976) assert G.number_of_edges() == (38786 + 6626 + 38044) # test HAS_NODE and HAS_EDGE assert 0 not in G assert 933 in G assert ("person", 933) not in G # deault node 
must be non-tuple format assert ("random", 933) not in G assert G.has_edge(933, 4398046511628) assert G.has_edge(("comment", 618475290625), ("post", 618475290624)) assert not G.has_edge(933, ("post", 618475290624)) # test GET_NODE_DATA and GET_EDGE_DATA assert G.get_node_data(933) == { "browserUsed": "Firefox", "locationIP": "172.16.58.3", "creationDate": "2010-02-14T15:32:10.447+0000", "birthday": "1989-12-03", "gender": "male", "lastName": "Perera", "firstName": "Mahinda", } assert G.get_edge_data(933, 4398046511628) == { "creationDate": "2010-07-30T15:19:53.298+0000", } assert sorted(list(G.neighbors(933))) == [ 4398046511628, 8796093023017, 28587302322537, ] if G.is_directed(): assert sorted(list(G.predecessors(4398046511628))) == [ 318, 933, 987, 2199023256530, ] G.add_node(0) # modify graph to make copy on write assert G.graph_type == graph_def_pb2.DYNAMIC_PROPERTY assert G.number_of_nodes() == (76831 + 903 + 78976) assert G.number_of_edges() == (38786 + 6626 + 38044) # test HAS_NODE and HAS_EDGE assert 0 in G assert 933 in G assert ("person", 933) not in G assert ("random", 933) not in G assert G.has_edge(933, 4398046511628) assert G.has_edge(("comment", 618475290625), ("post", 618475290624)) assert not G.has_edge(618475290625, ("post", 618475290624)) # test GET_NODE_DATA and GET_EDGE_DATA assert G.get_node_data(933) == { "browserUsed": "Firefox", "locationIP": "172.16.58.3", "creationDate": "2010-02-14T15:32:10.447+0000", "birthday": "1989-12-03", "gender": "male", "lastName": "Perera", "firstName": "Mahinda", } assert G.get_edge_data(933, 4398046511628) == { "creationDate": "2010-07-30T15:19:53.298+0000", } assert sorted(list(G.neighbors(933))) == [ 4398046511628, 8796093023017, 28587302322537, ] if G.is_directed(): assert sorted(list(G.predecessors(4398046511628))) == [ 318, 933, 987, 2199023256530, ] def test_str_oid_gs_to_nx(self): g = self.str_oid_g nx_g = self.NXGraph(g, default_label="comment") assert "618475290625" in nx_g self.assert_convert_success(g, nx_g) @pytest.mark.skip(reason="TODO: open after supporting run app on arrow_property") def test_gs_to_nx_with_sssp(self): nx_g = self.NXGraph(self.p2p) ret = nx.builtin.single_source_dijkstra_path_length(nx_g, 6, weight="f2") ret2 = nx.builtin.single_source_dijkstra_path_length( self.p2p_nx, 6, weight="weight" ) assert ret == ret2 def test_error_on_wrong_nx_type(self): g = self.single_label_g with pytest.raises(NetworkXError): nx_g = nx.DiGraph(g) @pytest.mark.skip(reason="FIXME: multiple session crash in ci.") def test_multiple_sessions(self): sess2 = graphscope.session(cluster_type="hosts", num_workers=1) nx2 = sess2.nx() gs_g = self.single_label_g if self.NXGraph is nx.Graph: gs_g2 = ldbc_sample_single_label_with_sess(sess2, self.data_dir, False) else: gs_g2 = ldbc_sample_single_label_with_sess(sess2, self.data_dir, True) assert gs_g.session_id != gs_g2.session_id nx_g = self.NXGraph(gs_g, dist=True) if nx_g.is_directed(): nx_g2 = nx2.DiGraph(gs_g2, dist=True) else: nx_g2 = nx2.Graph(gs_g2, dist=True) self.assert_convert_success(gs_g2, nx_g2) assert nx_g.session_id == gs_g.session_id assert nx_g2.session_id == gs_g2.session_id # copies cg1 = nx_g2.copy() assert cg1.session_id == nx_g2.session_id dg1 = nx_g2.to_directed() assert dg1.session_id == nx_g2.session_id dg2 = nx_g2.to_directed(as_view=True) assert dg2.session_id == nx_g2.session_id # subgraph sg1 = nx_g2.subgraph([274877907301, 274877907299]) assert sg1.session_id == nx_g2.session_id sg2 = nx_g2.edge_subgraph([(274877907301, 274877907299)]) assert sg2.session_id 
== nx_g2.session_id # error raise if gs graph and nx graph not in the same session. with pytest.raises( RuntimeError, match="graphscope graph and networkx graph not in the same session.", ): tmp = self.NXGraph(gs_g2) with pytest.raises( RuntimeError, match="networkx graph and graphscope graph not in the same session.", ): tmp = g(nx_g2) print(tmp.session_id, nx_g2.session_id) sess2.close() @pytest.mark.usefixtures("graphscope_session") class TestGraphProjectTest(object): @classmethod def setup_class(cls): cls.NXGraph = nx.Graph edgelist = os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist") cls.g = nx.read_edgelist( edgelist, nodetype=int, data=True, create_using=cls.NXGraph ) cls.g.add_node(1, vdata_str="kdjfao") cls.g.add_node(1, vdata_int=123) def test_project_to_simple(self): # default, e_prop='', v_prop='' sg1 = self.g._project_to_simple() assert ( sg1.schema.vdata_type == graph_def_pb2.NULLVALUE and sg1.schema.edata_type == graph_def_pb2.NULLVALUE ) # to_simple with e_prop sg2 = self.g._project_to_simple(e_prop="edata_float") assert ( sg2.schema.vdata_type == graph_def_pb2.NULLVALUE and sg2.schema.edata_type == graph_def_pb2.DOUBLE ) # to_simple with v_prop sg3 = self.g._project_to_simple(v_prop="vdata_str") assert ( sg3.schema.vdata_type == graph_def_pb2.STRING and sg3.schema.edata_type == graph_def_pb2.NULLVALUE ) # to_simple with e_prop and v_prop sg4 = self.g._project_to_simple(v_prop="vdata_int", e_prop="edata_str") assert ( sg4.schema.vdata_type == graph_def_pb2.LONG and sg4.schema.edata_type == graph_def_pb2.STRING ) # empty graph to simple empty_g = self.NXGraph() sg5 = empty_g._project_to_simple() assert ( sg5.schema.vdata_type == graph_def_pb2.NULLVALUE and sg5.schema.edata_type == graph_def_pb2.NULLVALUE ) with pytest.raises( InvalidArgumentError, match="graph not contains the vertex property foo" ): sg6 = empty_g._project_to_simple(v_prop="foo") @pytest.mark.skip(reason="It use much memory, exceeds the limit of Github runner") def test_implicit_project_to_simple(self): g = self.g nx.builtin.degree_centrality(g) nx.builtin.single_source_dijkstra_path_length(g, source=6, weight="weight") def test_error_on_not_exist_vertex_property(self): g = self.NXGraph() g.add_node(0, foo="node") with pytest.raises( InvalidArgumentError, match="graph not contains the vertex property weight" ): sg = g._project_to_simple(v_prop="weight") def test_error_on_not_exist_edge_property(self): g = self.NXGraph() g.add_edge(0, 1, weight=3) with pytest.raises( InvalidArgumentError, match="graph not contains the edge property type" ): sg = g._project_to_simple(e_prop="type") @pytest.mark.skip(reason="FIXME: engine can not catch the app throw error now") def test_error_on_some_edges_not_contain_property(self): g = self.g # some edges not contain the property with pytest.raises(RuntimeError): nx.builtin.single_source_dijkstra_path_length( g, source=6, weight="edata_random_int_0" ) @pytest.mark.skip(reason="FIXME: engine can not catch the app throw error now") def test_error_on_some_edges_has_wrong_type(self): g = self.g.copy() # set edge a wrong type g[6][42]["weight"] = "a str" with pytest.raises(RuntimeError): nx.builtin.single_source_dijkstra_path_length(g, source=6, weight="weight") @pytest.mark.skip(reason="find a algorithm that use vertex data") def test_error_on_some_nodes_not_contain_property(self): g = self.g with pytest.raises(RuntimeError): nx.builtin.sssp(weight="vdata_random_int_0") @pytest.mark.skip(reason="find a algorithm that use vertex data") def 
test_error_on_some_nodes_has_wrong_type(self): g = self.g.copy() g[0]["weight"] = "a str" with pytest.raises(RuntimeError): nx.builtin.sssp(weight="weight") @pytest.mark.usefixtures("graphscope_session") class TestDigraphTransformation(TestGraphTransformation): @classmethod def setup_class(cls): cls.NXGraph = nx.DiGraph data_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample") cls.single_label_g = ldbc_sample_single_label(data_dir, True) cls.multi_label_g = ldbc_sample_multi_labels(data_dir, True) cls.p2p = load_p2p(os.path.expandvars("${GS_TEST_DIR}"), True) cls.p2p_nx = nx.read_edgelist( os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"), nodetype=int, data=True, create_using=nx.DiGraph, ) cls.str_oid_g = ldbc_sample_string_oid(data_dir, True) @classmethod def teardown_class(cls): cls.single_label_g.unload() cls.multi_label_g.unload() cls.str_oid_g.unload() def test_error_on_wrong_nx_type(self): g = self.single_label_g with pytest.raises(NetworkXError): nx_g = nx.Graph(g) @pytest.mark.usefixtures("graphscope_session") class TestDiGraphProjectTest(TestGraphProjectTest): @classmethod def setup_class(cls): cls.NXGraph = nx.DiGraph edgelist = os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist") cls.g = nx.read_edgelist( edgelist, nodetype=int, data=True, create_using=cls.NXGraph ) cls.g.add_node(0, vdata_str="kdjfao") cls.g.add_node(1, vdata_int=123) @pytest.mark.usefixtures("graphscope_session") class TestImportNetworkxModuleWithSession(object): @classmethod def setup_class(cls): cls.session1 = graphscope.session(cluster_type="hosts", num_workers=1) cls.session2 = graphscope.session(cluster_type="hosts", num_workers=1) cls.session_lazy = graphscope.session( cluster_type="hosts", num_workers=1, mode="lazy" ) def test_import(self): import graphscope.nx as nx_default nx1 = self.session1.nx() nx2 = self.session2.nx() G = nx_default.Graph() G1 = nx1.Graph() G2 = nx2.Graph() assert G.session_id == get_default_session().session_id assert G1.session_id == self.session1.session_id assert G2.session_id == self.session2.session_id self.session1.close() self.session2.close() def test_error_import_with_wrong_session(self): with pytest.raises( RuntimeError, match="Networkx module need the session to be eager mode. Current session is lazy mode.", ): nx = self.session_lazy.nx() self.session_lazy.close()
[ "graphscope.nx.complete_graph", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.read_edgelist", "graphscope.client.session.get_default_session", "graphscope.session", "graphscope.client.session.g.add_edge", "pytest.mark.usefixtures", "graphscope.client.session.g._project_to_simple", "graphscope.nx.les_miserables_graph", "graphscope.g", "graphscope.client.session.g", "pytest.mark.skip", "graphscope.client.session.g.add_node", "graphscope.nx.DiGraph", "pytest.raises", "graphscope.nx.builtin.degree_centrality", "os.path.expandvars", "os.path.join", "os.environ.get", "graphscope.nx.builtin.sssp", "graphscope.nx.Graph" ]
[((4390, 4435), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (4413, 4435), False, 'import pytest\n'), ((18519, 18564), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (18542, 18564), False, 'import pytest\n'), ((22584, 22629), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (22607, 22629), False, 'import pytest\n'), ((23624, 23669), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (23647, 23669), False, 'import pytest\n'), ((24092, 24137), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (24115, 24137), False, 'import pytest\n'), ((1094, 1145), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (1106, 1145), False, 'import graphscope\n'), ((1485, 1555), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'oid_type': '"""string"""', 'generate_eid': '(False)'}), "(directed=directed, oid_type='string', generate_eid=False)\n", (1497, 1555), False, 'import graphscope\n'), ((2300, 2351), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (2312, 2351), False, 'import graphscope\n'), ((16103, 16188), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""TODO: open after supporting run app on arrow_property"""'}), "(reason='TODO: open after supporting run app on arrow_property'\n )\n", (16119, 16188), False, 'import pytest\n'), ((16644, 16707), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: multiple session crash in ci."""'}), "(reason='FIXME: multiple session crash in ci.')\n", (16660, 16707), False, 'import pytest\n'), ((20448, 20534), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""It use much memory, exceeds the limit of Github runner"""'}), "(reason=\n 'It use much memory, exceeds the limit of Github runner')\n", (20464, 20534), False, 'import pytest\n'), ((21317, 21395), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: engine can not catch the app throw error now"""'}), "(reason='FIXME: engine can not catch the app throw error now')\n", (21333, 21395), False, 'import pytest\n'), ((21700, 21778), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: engine can not catch the app throw error now"""'}), "(reason='FIXME: engine can not catch the app throw error now')\n", (21716, 21778), False, 'import pytest\n'), ((22065, 22129), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""find a algorithm that use vertex data"""'}), "(reason='find a algorithm that use vertex data')\n", (22081, 22129), False, 'import pytest\n'), ((22315, 22379), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""find a algorithm that use vertex data"""'}), "(reason='find a algorithm that use vertex data')\n", (22331, 22379), False, 'import pytest\n'), ((4573, 4621), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/ldbc_sample"""'], {}), "('${GS_TEST_DIR}/ldbc_sample')\n", (4591, 4621), False, 'import os\n'), ((6245, 6258), 'graphscope.client.session.g', 'g', (['empty_nx_g'], {}), '(empty_nx_g)\n', (6246, 6258), False, 'from graphscope.client.session import g\n'), ((6470, 6477), 
'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6471, 6477), False, 'from graphscope.client.session import g\n'), ((6578, 6626), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(10)'], {'create_using': 'self.NXGraph'}), '(10, create_using=self.NXGraph)\n', (6595, 6626), True, 'import graphscope.nx as nx\n'), ((6642, 6649), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6643, 6649), False, 'from graphscope.client.session import g\n'), ((6992, 6999), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6993, 6999), False, 'from graphscope.client.session import g\n'), ((7102, 7127), 'graphscope.nx.les_miserables_graph', 'nx.les_miserables_graph', ([], {}), '()\n', (7125, 7127), True, 'import graphscope.nx as nx\n'), ((7143, 7150), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (7144, 7150), False, 'from graphscope.client.session import g\n'), ((7782, 7789), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (7783, 7789), False, 'from graphscope.client.session import g\n'), ((7979, 7987), 'graphscope.client.session.g', 'g', (['nx_g2'], {}), '(nx_g2)\n', (7980, 7987), False, 'from graphscope.client.session import g\n'), ((8176, 8184), 'graphscope.client.session.g', 'g', (['nx_g3'], {}), '(nx_g3)\n', (8177, 8184), False, 'from graphscope.client.session import g\n'), ((8399, 8407), 'graphscope.client.session.g', 'g', (['nx_g4'], {}), '(nx_g4)\n', (8400, 8407), False, 'from graphscope.client.session import g\n'), ((8800, 8807), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8801, 8807), False, 'from graphscope.client.session import g\n'), ((9126, 9133), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9127, 9133), False, 'from graphscope.client.session import g\n'), ((9251, 9258), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9252, 9258), False, 'from graphscope.client.session import g\n'), ((9373, 9380), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9374, 9380), False, 'from graphscope.client.session import g\n'), ((9482, 9489), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9483, 9489), False, 'from graphscope.client.session import g\n'), ((9695, 9702), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9696, 9702), False, 'from graphscope.client.session import g\n'), ((10152, 10159), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10153, 10159), False, 'from graphscope.client.session import g\n'), ((11024, 11035), 'graphscope.client.session.g', 'g', (['empty_nx'], {}), '(empty_nx)\n', (11025, 11035), False, 'from graphscope.client.session import g\n'), ((16275, 16342), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['nx_g', '(6)'], {'weight': '"""f2"""'}), "(nx_g, 6, weight='f2')\n", (16320, 16342), True, 'import graphscope.nx as nx\n'), ((16358, 16436), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.p2p_nx', '(6)'], {'weight': '"""weight"""'}), "(self.p2p_nx, 6, weight='weight')\n", (16403, 16436), True, 'import graphscope.nx as nx\n'), ((16762, 16817), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (16780, 16817), False, 'import graphscope\n'), ((18694, 18762), 'os.path.expandvars', 'os.path.expandvars', 
(['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (18712, 18762), False, 'import os\n'), ((18779, 18856), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['edgelist'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'cls.NXGraph'}), '(edgelist, nodetype=int, data=True, create_using=cls.NXGraph)\n', (18795, 18856), True, 'import graphscope.nx as nx\n'), ((20604, 20635), 'graphscope.nx.builtin.degree_centrality', 'nx.builtin.degree_centrality', (['g'], {}), '(g)\n', (20632, 20635), True, 'import graphscope.nx as nx\n'), ((20644, 20719), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""weight"""'}), "(g, source=6, weight='weight')\n", (20689, 20719), True, 'import graphscope.nx as nx\n'), ((20811, 20836), 'graphscope.client.session.g.add_node', 'g.add_node', (['(0)'], {'foo': '"""node"""'}), "(0, foo='node')\n", (20821, 20836), False, 'from graphscope.client.session import g\n'), ((21108, 21134), 'graphscope.client.session.g.add_edge', 'g.add_edge', (['(0)', '(1)'], {'weight': '(3)'}), '(0, 1, weight=3)\n', (21118, 21134), False, 'from graphscope.client.session import g\n'), ((22783, 22831), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/ldbc_sample"""'], {}), "('${GS_TEST_DIR}/ldbc_sample')\n", (22801, 22831), False, 'import os\n'), ((23817, 23885), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (23835, 23885), False, 'import os\n'), ((23902, 23979), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['edgelist'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'cls.NXGraph'}), '(edgelist, nodetype=int, data=True, create_using=cls.NXGraph)\n', (23918, 23979), True, 'import graphscope.nx as nx\n'), ((24255, 24310), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (24273, 24310), False, 'import graphscope\n'), ((24334, 24389), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (24352, 24389), False, 'import graphscope\n'), ((24417, 24485), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)', 'mode': '"""lazy"""'}), "(cluster_type='hosts', num_workers=1, mode='lazy')\n", (24435, 24485), False, 'import graphscope\n'), ((24658, 24676), 'graphscope.nx.Graph', 'nx_default.Graph', ([], {}), '()\n', (24674, 24676), True, 'import graphscope.nx as nx_default\n'), ((1193, 1232), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (1205, 1232), False, 'import os\n'), ((1310, 1365), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (1322, 1365), False, 'import os\n'), ((1603, 1642), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (1615, 1642), False, 'import os\n'), ((1720, 1775), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (1732, 1775), False, 'import os\n'), ((2006, 2045), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), 
"(prefix, 'comment_0_0.csv')\n", (2018, 2045), False, 'import os\n'), ((2123, 2178), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (2135, 2178), False, 'import os\n'), ((2995, 3031), 'os.path.join', 'os.path.join', (['prefix', '"""post_0_0.csv"""'], {}), "(prefix, 'post_0_0.csv')\n", (3007, 3031), False, 'import os\n'), ((3849, 3901), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_post_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_post_0_0.csv')\n", (3861, 3901), False, 'import os\n'), ((4798, 4834), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (4816, 4834), False, 'import os\n'), ((4894, 4962), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (4912, 4962), False, 'import os\n'), ((9828, 9974), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"""'}), '(RuntimeError, match=\n "The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"\n )\n', (9841, 9974), False, 'import pytest\n'), ((10020, 10027), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10021, 10027), False, 'from graphscope.client.session import g\n'), ((10420, 10492), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""graph view can not convert to gs graph"""'}), "(TypeError, match='graph view can not convert to gs graph')\n", (10433, 10492), False, 'import pytest\n'), ((10513, 10520), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10514, 10520), False, 'from graphscope.client.session import g\n'), ((10705, 10851), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"""'}), '(RuntimeError, match=\n "The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"\n )\n', (10718, 10851), False, 'import pytest\n'), ((10897, 10904), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10898, 10904), False, 'from graphscope.client.session import g\n'), ((12754, 12788), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (12768, 12788), False, 'import os\n'), ((16575, 16603), 'pytest.raises', 'pytest.raises', (['NetworkXError'], {}), '(NetworkXError)\n', (16588, 16603), False, 'import pytest\n'), ((16624, 16637), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['g'], {}), '(g)\n', (16634, 16637), True, 'import graphscope.nx as nx\n'), ((18095, 18197), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""graphscope graph and networkx graph not in the same session."""'}), "(RuntimeError, match=\n 'graphscope graph and networkx graph not in the same session.')\n", (18108, 18197), False, 'import pytest\n'), ((18280, 18382), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""networkx graph and graphscope graph not in the same session."""'}), "(RuntimeError, match=\n 'networkx graph and graphscope graph not in the same session.')\n", (18293, 18382), False, 'import pytest\n'), ((18432, 18440), 'graphscope.client.session.g', 'g', (['nx_g2'], {}), '(nx_g2)\n', (18433, 18440), False, 'from graphscope.client.session import 
g\n'), ((20272, 20364), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the vertex property foo"""'}), "(InvalidArgumentError, match=\n 'graph not contains the vertex property foo')\n", (20285, 20364), False, 'import pytest\n'), ((20850, 20945), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the vertex property weight"""'}), "(InvalidArgumentError, match=\n 'graph not contains the vertex property weight')\n", (20863, 20945), False, 'import pytest\n'), ((20981, 21018), 'graphscope.client.session.g._project_to_simple', 'g._project_to_simple', ([], {'v_prop': '"""weight"""'}), "(v_prop='weight')\n", (21001, 21018), False, 'from graphscope.client.session import g\n'), ((21148, 21239), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the edge property type"""'}), "(InvalidArgumentError, match=\n 'graph not contains the edge property type')\n", (21161, 21239), False, 'import pytest\n'), ((21275, 21310), 'graphscope.client.session.g._project_to_simple', 'g._project_to_simple', ([], {'e_prop': '"""type"""'}), "(e_prop='type')\n", (21295, 21310), False, 'from graphscope.client.session import g\n'), ((21535, 21562), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (21548, 21562), False, 'import pytest\n'), ((21576, 21668), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""edata_random_int_0"""'}), "(g, source=6, weight=\n 'edata_random_int_0')\n", (21621, 21668), True, 'import graphscope.nx as nx\n'), ((21942, 21969), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (21955, 21969), False, 'import pytest\n'), ((21983, 22058), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""weight"""'}), "(g, source=6, weight='weight')\n", (22028, 22058), True, 'import graphscope.nx as nx\n'), ((22223, 22250), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (22236, 22250), False, 'import pytest\n'), ((22264, 22308), 'graphscope.nx.builtin.sssp', 'nx.builtin.sssp', ([], {'weight': '"""vdata_random_int_0"""'}), "(weight='vdata_random_int_0')\n", (22279, 22308), True, 'import graphscope.nx as nx\n'), ((22507, 22534), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (22520, 22534), False, 'import pytest\n'), ((22548, 22580), 'graphscope.nx.builtin.sssp', 'nx.builtin.sssp', ([], {'weight': '"""weight"""'}), "(weight='weight')\n", (22563, 22580), True, 'import graphscope.nx as nx\n'), ((22998, 23034), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (23016, 23034), False, 'import os\n'), ((23093, 23161), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (23111, 23161), False, 'import os\n'), ((23560, 23588), 'pytest.raises', 'pytest.raises', (['NetworkXError'], {}), '(NetworkXError)\n', (23573, 23588), False, 'import pytest\n'), ((23609, 23620), 'graphscope.nx.Graph', 'nx.Graph', (['g'], {}), '(g)\n', (23617, 23620), True, 'import graphscope.nx as nx\n'), ((25032, 25159), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""Networkx module need the session to be eager mode. 
Current session is lazy mode."""'}), "(RuntimeError, match=\n 'Networkx module need the session to be eager mode. Current session is lazy mode.'\n )\n", (25045, 25159), False, 'import pytest\n'), ((24758, 24779), 'graphscope.client.session.get_default_session', 'get_default_session', ([], {}), '()\n', (24777, 24779), False, 'from graphscope.client.session import get_default_session\n'), ((2625, 2663), 'os.path.join', 'os.path.join', (['prefix', '"""person_0_0.csv"""'], {}), "(prefix, 'person_0_0.csv')\n", (2637, 2663), False, 'import os\n'), ((3616, 3667), 'os.path.join', 'os.path.join', (['prefix', '"""person_knows_person_0_0.csv"""'], {}), "(prefix, 'person_knows_person_0_0.csv')\n", (3628, 3667), False, 'import os\n'), ((4200, 4232), 'os.path.join', 'os.path.join', (['prefix', '"""p2p-31.e"""'], {}), "(prefix, 'p2p-31.e')\n", (4212, 4232), False, 'import os\n'), ((2413, 2452), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (2425, 2452), False, 'import os\n'), ((3392, 3447), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (3404, 3447), False, 'import os\n')]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is referred and derived from project NetworkX
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
import os

import pytest

np = pytest.importorskip("numpy")
np_assert_equal = np.testing.assert_equal

# fmt: off
from networkx.tests.test_convert_numpy import \
    TestConvertNumpyArray as _TestConvertNumpyArray
from networkx.tests.test_convert_numpy import \
    TestConvertNumpyMatrix as _TestConvertNumpyMatrix
from networkx.utils import edges_equal

import graphscope.nx as nx
from graphscope.nx.generators.classic import barbell_graph
from graphscope.nx.generators.classic import cycle_graph
from graphscope.nx.generators.classic import path_graph
from graphscope.nx.utils.compat import with_graphscope_nx_context
# fmt: on


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(_TestConvertNumpyMatrix)
class TestConvertNumpyMatrix:
    def assert_equal(self, G1, G2):
        assert sorted(G1.nodes()) == sorted(G2.nodes())
        assert edges_equal(sorted(G1.edges()), sorted(G2.edges()))

    def test_from_numpy_matrix_type(self):
        pass

    def test_from_numpy_matrix_dtype(self):
        pass


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(_TestConvertNumpyArray)
class TestConvertNumpyArray:
    def assert_equal(self, G1, G2):
        assert sorted(G1.nodes()) == sorted(G2.nodes())
        assert edges_equal(sorted(G1.edges()), sorted(G2.edges()))

    def test_from_numpy_array_type(self):
        pass

    def test_from_numpy_array_dtype(self):
        pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.importorskip", "pytest.mark.usefixtures" ]
[((431, 459), 'pytest.importorskip', 'pytest.importorskip', (['"""numpy"""'], {}), "('numpy')\n", (450, 459), False, 'import pytest\n'), ((1035, 1080), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1058, 1080), False, 'import pytest\n'), ((1082, 1133), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_TestConvertNumpyMatrix'], {}), '(_TestConvertNumpyMatrix)\n', (1108, 1133), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1441, 1486), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1464, 1486), False, 'import pytest\n'), ((1488, 1538), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_TestConvertNumpyArray'], {}), '(_TestConvertNumpyArray)\n', (1514, 1538), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
import networkx.algorithms.tree.tests.test_coding
import pytest

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(networkx.algorithms.tree.tests.test_coding,
                        decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.tree.tests.test_coding import TestPruferSequence


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestPruferSequence)
class TestPruferSequence():
    def test_inverse(self):
        for seq in product(range(4), repeat=2):
            seq2 = nx.to_prufer_sequence(nx.from_prufer_sequence(seq))
            assert list(seq) == seq2
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((423, 468), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (446, 468), False, 'import pytest\n'), ((470, 516), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestPruferSequence'], {}), '(TestPruferSequence)\n', (496, 516), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((298, 343), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (321, 343), False, 'import pytest\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import hashlib import json import logging import threading from copy import deepcopy from typing import Mapping import vineyard from graphscope.client.session import get_default_session from graphscope.config import GSConfig as gs_config from graphscope.framework import dag_utils from graphscope.framework import graph_utils from graphscope.framework import utils from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.errors import check_argument from graphscope.framework.graph_schema import GraphSchema from graphscope.framework.graph_utils import EdgeLabel from graphscope.framework.graph_utils import EdgeSubLabel from graphscope.framework.graph_utils import VertexLabel from graphscope.framework.operation import Operation from graphscope.proto import types_pb2 logger = logging.getLogger("graphscope") class Graph(object): """A class for representing metadata of a graph in the GraphScope. A :class:`Graph` object holds the metadata of a graph, such as key, schema, and the graph is directed or not. It is worth noting that the graph is stored by the backend such as Analytical Engine, Vineyard. In other words, the graph object holds nothing but metadata. The following example demonstrates its usage: .. code:: python >>> import graphscope as gs >>> from graphscope.framework.loader import Loader >>> sess = gs.session() >>> graph = Graph(sess) >>> graph = graph.add_vertices("person.csv","person") >>> graph = graph.add_vertices("software.csv", "software") >>> graph = graph.add_edges("knows.csv", "knows", src_label="person", dst_label="person") >>> graph = graph.add_edges("created.csv", "created", src_label="person", dst_label="software") >>> print(graph) >>> print(graph.schema) """ def __init__( self, session=None, incoming_data=None, oid_type="int64", directed=True, generate_eid=True, ): """Construct a :class:`Graph` object. Args: session_id (str): Session id of the session the graph is created in. 
incoming_data: Graph can be initialized through various type of sources, which can be one of: - :class:`Operation` - :class:`nx.Graph` - :class:`Graph` - :class:`vineyard.Object`, :class:`vineyard.ObjectId` or :class:`vineyard.ObjectName` """ self._key = None self._graph_type = types_pb2.ARROW_PROPERTY self._vineyard_id = 0 self._schema = GraphSchema() if session is None: session = get_default_session() self._session = session self._detached = False self._interactive_instance_launching_thread = None self._interactive_instance_list = [] self._learning_instance_list = [] # Hold uncompleted operation for lazy evaluation self._pending_op = None # Hold a reference to base graph of modify operation, # to avoid being garbage collected self._base_graph = None oid_type = utils.normalize_data_type_str(oid_type) if oid_type not in ("int64_t", "std::string"): raise ValueError("oid_type can only be int64_t or string.") self._oid_type = oid_type self._directed = directed self._generate_eid = generate_eid self._unsealed_vertices = {} self._unsealed_edges = {} # Used to isplay schema without load into vineyard, # and do sanity checking for newly added vertices and edges. self._v_labels = [] self._e_labels = [] self._e_relationships = [] if incoming_data is not None: # Don't import the :code:`NXGraph` in top-level statements to improve the # performance of :code:`import graphscope`. from graphscope.experimental import nx if isinstance(incoming_data, Operation): self._pending_op = incoming_data if self._pending_op.type == types_pb2.PROJECT_GRAPH: self._graph_type = types_pb2.ARROW_PROJECTED elif isinstance(incoming_data, nx.Graph): self._pending_op = self._from_nx_graph(incoming_data) elif isinstance(incoming_data, Graph): self._pending_op = self._copy_from(incoming_data) elif isinstance( incoming_data, (vineyard.Object, vineyard.ObjectID, vineyard.ObjectName) ): self._pending_op = self._from_vineyard(incoming_data) else: raise RuntimeError("Not supported incoming data.") def __del__(self): # cleanly ignore all exceptions, cause session may already closed / destroyed. try: self.unload() except Exception: # pylint: disable=broad-except pass def _close_interactive_instances(self): # Close related interactive instances when graph unloaded. # Since the graph is gone, quering via interactive client is meaningless. for instance in self._interactive_instance_list: instance.close() self._interactive_instance_list.clear() def _close_learning_instances(self): for instance in self._learning_instance_list: instance.close() self._learning_instance_list.clear() def _launch_interactive_instance_impl(self): try: self._session.gremlin(self) except: # noqa: E722 # Record error msg in `InteractiveQuery` when launching failed. # Unexpect and suppress all exceptions here. pass def _from_graph_def(self, graph_def): check_argument( self._graph_type == graph_def.graph_type, "Graph type doesn't match." 
) self._key = graph_def.key self._vineyard_id = graph_def.vineyard_id self._oid_type = graph_def.schema_def.oid_type self._directed = graph_def.directed self._generate_eid = graph_def.generate_eid self._schema_path = graph_def.schema_path self._schema.get_schema_from_def(graph_def.schema_def) self._v_labels = self._schema.vertex_labels self._e_labels = self._schema.edge_labels self._e_relationships = self._schema.edge_relationships # create gremlin server pod asynchronously if gs_config.initializing_interactive_engine: self._interactive_instance_launching_thread = threading.Thread( target=self._launch_interactive_instance_impl, args=() ) self._interactive_instance_launching_thread.start() def _ensure_loaded(self): if self._key is not None and self._pending_op is None: return # Unloaded if self._session is None: raise RuntimeError("The graph is not loaded") # Empty graph if self._key is None and self._pending_op is None: raise RuntimeError("Empty graph.") # Try to load if self._pending_op is not None: # Create a graph from scratch. graph_def = self._pending_op.eval() self._from_graph_def(graph_def) self._pending_op = None self._base_graph = None self._unsealed_vertices.clear() self._unsealed_edges.clear() # init saved_signature (must be after init schema) self._saved_signature = self.signature @property def key(self): """The key of the corresponding graph in engine.""" self._ensure_loaded() return self._key @property def graph_type(self): """The type of the graph object. Returns: type (`types_pb2.GraphType`): the type of the graph. """ return self._graph_type @property def schema(self): """Schema of the graph. Returns: :class:`GraphSchema`: the schema of the graph """ self._ensure_loaded() return self._schema @property def schema_path(self): """Path that Coordinator will write interactive schema path to. Returns: str: The path contains the schema. for interactive engine. """ self._ensure_loaded() return self._schema_path @property def signature(self): self._ensure_loaded() return hashlib.sha256( "{}.{}".format(self._schema.signature(), self._key).encode("utf-8") ).hexdigest() @property def template_str(self): self._ensure_loaded() # transform str/string to std::string oid_type = utils.normalize_data_type_str(self._oid_type) vid_type = self._schema.vid_type vdata_type = utils.data_type_to_cpp(self._schema.vdata_type) edata_type = utils.data_type_to_cpp(self._schema.edata_type) if self._graph_type == types_pb2.ARROW_PROPERTY: template = f"vineyard::ArrowFragment<{oid_type},{vid_type}>" elif self._graph_type == types_pb2.ARROW_PROJECTED: template = f"gs::ArrowProjectedFragment<{oid_type},{vid_type},{vdata_type},{edata_type}>" elif self._graph_type == types_pb2.DYNAMIC_PROJECTED: template = f"gs::DynamicProjectedFragment<{vdata_type},{edata_type}>" else: raise ValueError(f"Unsupported graph type: {self._graph_type}") return template @property def vineyard_id(self): """Get the vineyard object_id of this graph. Returns: str: return vineyard id of this graph """ self._ensure_loaded() return self._vineyard_id @property def session_id(self): """Get the currrent session_id. Returns: str: Return session id that the graph belongs to. """ return self._session.session_id def detach(self): """Detaching a graph makes it being left in vineyard even when the varaible for this :class:`Graph` object leaves the lexical scope. The graph can be accessed using the graph's :code:`ObjectID` or its name later. 
""" self._detached = True def loaded(self): try: self._ensure_loaded() except RuntimeError: return False return self._key is not None def __str__(self): v_str = "\n".join([f"VERTEX: {label}" for label in self._v_labels]) relations = [] for i in range(len(self._e_labels)): relations.extend( [(self._e_labels[i], src, dst) for src, dst in self._e_relationships[i]] ) e_str = "\n".join( [f"EDGE: {label}\tsrc: {src}\tdst: {dst}" for label, src, dst in relations] ) return f"graphscope.Graph\n{types_pb2.GraphType.Name(self._graph_type)}\n{v_str}\n{e_str}" def __repr__(self): return self.__str__() def unload(self): """Unload this graph from graphscope engine.""" if self._session is None: raise RuntimeError("The graph is not loaded") if self._key is None: self._session = None self._pending_op = None return # close interactive instances first try: if ( self._interactive_instance_launching_thread is not None and self._interactive_instance_launching_thread.is_alive() ): # join raises a RuntimeError if an attempt is made to join the current thread. # this exception occurs when a object collected by gc mechanism contains a running thread. if ( threading.current_thread() != self._interactive_instance_launching_thread ): self._interactive_instance_launching_thread.join() self._close_interactive_instances() except Exception as e: logger.error("Failed to close interactive instances: %s" % e) try: self._close_learning_instances() except Exception as e: logger.error("Failed to close learning instances: %s" % e) if not self._detached: op = dag_utils.unload_graph(self) op.eval() self._key = None self._session = None self._pending_op = None def project_to_simple(self, v_label="_", e_label="_", v_prop=None, e_prop=None): """Project a property graph to a simple graph, useful for analytical engine. Will translate name represented label or property to index, which is broadedly used in internal engine. Args: v_label (str, optional): vertex label to project. Defaults to "_". e_label (str, optional): edge label to project. Defaults to "_". v_prop (str, optional): vertex property of the v_label. Defaults to None. e_prop (str, optional): edge property of the e_label. Defaults to None. Returns: :class:`Graph`: A `Graph` instance, which graph_type is `ARROW_PROJECTED` """ self._ensure_loaded() check_argument(self.graph_type == types_pb2.ARROW_PROPERTY) self._check_unmodified() def check_out_of_range(id, length): if id >= length or id < 0: raise IndexError("id {} is out of range.".format(id)) try: if isinstance(v_label, str): v_label_id = self._schema.vertex_label_index(v_label) else: v_label_id = v_label check_out_of_range(v_label_id, self._schema.vertex_label_num) v_label = self._schema.vertex_labels[v_label_id] if isinstance(e_label, str): e_label_id = self._schema.edge_label_index(e_label) else: e_label_id = e_label check_out_of_range(e_label_id, self._schema.edge_label_num) e_label = self._schema.edge_labels[e_label] except ValueError as e: raise ValueError("Label does not exists.") from e # Check relation v_label -> e_label <- v_label exists. relation = (v_label, v_label) if relation not in self._schema.edge_relationships[e_label_id]: raise ValueError( f"Graph doesn't contain such relationship: {v_label} -> {e_label} <- {v_label}." 
) try: if v_prop is None: v_prop_id = -1 vdata_type = None else: if isinstance(v_prop, str): v_prop_id = self._schema.vertex_property_index(v_label_id, v_prop) else: v_prop_id = v_prop properties = self._schema.vertex_properties[v_label_id] check_out_of_range(v_prop_id, len(properties)) vdata_type = list(properties.values())[v_prop_id] if e_prop is None: e_prop_id = -1 edata_type = None else: if isinstance(e_prop, str): e_prop_id = self._schema.edge_property_index(e_label_id, e_prop) else: e_prop_id = e_prop properties = self._schema.edge_properties[e_label_id] check_out_of_range(e_prop_id, len(properties)) edata_type = list(properties.values())[e_prop_id] except ValueError as e: raise ValueError("Property does not exists.") from e oid_type = self._schema.oid_type vid_type = self._schema.vid_type op = dag_utils.project_arrow_property_graph( self, v_label_id, v_prop_id, e_label_id, e_prop_id, vdata_type, edata_type, oid_type, vid_type, ) return Graph(self._session, op) def add_column(self, results, selector): """Add the results as a column to the graph. Modification rules are given by the selector. Args: results (:class:`Context`): A `Context` that created by doing a query. selector (dict): Select results to add as column. Format is similar to selectors in `Context` Returns: :class:`Graph`: A new `Graph` with new columns. """ self._ensure_loaded() check_argument( isinstance(selector, Mapping), "selector of add column must be a dict" ) check_argument(self.graph_type == types_pb2.ARROW_PROPERTY) self._check_unmodified() selector = { key: results._transform_selector(value) for key, value in selector.items() } selector = json.dumps(selector) op = dag_utils.add_column(self, results, selector) return Graph(self._session, op) def to_numpy(self, selector, vertex_range=None): """Select some elements of the graph and output to numpy. Args: selector (str): Select a portion of graph as a numpy.ndarray. vertex_range(dict, optional): Slice vertices. Defaults to None. Returns: `numpy.ndarray` """ check_argument(self.graph_type == types_pb2.ARROW_PROPERTY) self._ensure_loaded() self._check_unmodified() selector = utils.transform_labeled_vertex_property_data_selector(self, selector) vertex_range = utils.transform_vertex_range(vertex_range) op = dag_utils.graph_to_numpy(self, selector, vertex_range) ret = op.eval() return utils.decode_numpy(ret) def to_dataframe(self, selector, vertex_range=None): """Select some elements of the graph and output as a pandas.DataFrame Args: selector (dict): Select some portions of graph. vertex_range (dict, optional): Slice vertices. Defaults to None. Returns: `pandas.DataFrame` """ check_argument(self.graph_type == types_pb2.ARROW_PROPERTY) self._ensure_loaded() self._check_unmodified() check_argument( isinstance(selector, Mapping), "selector of to_vineyard_dataframe must be a dict", ) selector = { key: utils.transform_labeled_vertex_property_data_selector(self, value) for key, value in selector.items() } selector = json.dumps(selector) vertex_range = utils.transform_vertex_range(vertex_range) op = dag_utils.graph_to_dataframe(self, selector, vertex_range) ret = op.eval() return utils.decode_dataframe(ret) def is_directed(self): self._ensure_loaded() return self._directed def _check_unmodified(self): self._ensure_loaded() check_argument( self.signature == self._saved_signature, "Graph has been modified!" ) def _from_nx_graph(self, incoming_graph): """Create a gs graph from a nx graph. Args: incoming_graph (:class:`nx.graph`): A nx graph that contains graph data. 
Returns: that will be used to construct a gs.Graph Raises: TypeError: Raise Error if graph type not match. Examples: >>> nx_g = nx.path_graph(10) >>> gs_g = gs.Graph(nx_g) """ if hasattr(incoming_graph, "_graph"): msg = "graph view can not convert to gs graph" raise TypeError(msg) return dag_utils.dynamic_to_arrow(incoming_graph) def _copy_from(self, incoming_graph): """Copy a graph. Args: incoming_graph (:class:`Graph`): Source graph to be copied from Returns: :class:`Graph`: An identical graph, but with a new vineyard id. """ check_argument(incoming_graph.graph_type == types_pb2.ARROW_PROPERTY) check_argument(incoming_graph.loaded()) return dag_utils.copy_graph(incoming_graph) def _from_vineyard(self, vineyard_object): """Load a graph from a already existed vineyard graph. Args: vineyard_object (:class:`vineyard.Object`, :class:`vineyard.ObjectID` or :class:`vineyard.ObjectName`): vineyard object, which represents a graph. Returns: A graph_def. """ if isinstance(vineyard_object, vineyard.Object): return self._from_vineyard_id(vineyard_object.id) if isinstance(vineyard_object, vineyard.ObjectID): return self._from_vineyard_id(vineyard_object) if isinstance(vineyard_object, vineyard.ObjectName): return self._from_vineyard_name(vineyard_object) def _from_vineyard_id(self, vineyard_id): config = {} config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(True) config[types_pb2.VINEYARD_ID] = utils.i_to_attr(int(vineyard_id)) # FIXME(hetao) hardcode oid/vid type for codegen, when loading from vineyard # # the metadata should be retrived from vineyard config[types_pb2.OID_TYPE] = utils.s_to_attr("int64_t") config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t") return dag_utils.create_graph( self.session_id, types_pb2.ARROW_PROPERTY, attrs=config ) def _from_vineyard_name(self, vineyard_name): config = {} config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(True) config[types_pb2.VINEYARD_NAME] = utils.s_to_attr(str(vineyard_name)) # FIXME(hetao) hardcode oid/vid type for codegen, when loading from vineyard # # the metadata should be retrived from vineyard config[types_pb2.OID_TYPE] = utils.s_to_attr("int64_t") config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t") return dag_utils.create_graph( self.session_id, types_pb2.ARROW_PROPERTY, attrs=config ) def _attach_interactive_instance(self, instance): """Store the instance when a new interactive instance is started. Args: instance: interactive instance """ self._interactive_instance_list.append(instance) def _attach_learning_instance(self, instance): """Store the instance when a new learning instance is created. Args: instance: learning instance """ self._learning_instance_list.append(instance) def save_to(self, path, **kwargs): """Serialize graph to a location. The meta and data of graph is dumped to specified location, and can be restored by `Graph.deserialize` in other sessions. Each worker will write a `path_{worker_id}.meta` file and a `path_{worker_id}` file to storage. 
Args: path (str): supported storages are local, hdfs, oss, s3 """ import vineyard import vineyard.io self._ensure_loaded() sess = self._session deployment = "kubernetes" if sess.info["type"] == "k8s" else "ssh" conf = sess.info["engine_config"] vineyard_endpoint = conf["vineyard_rpc_endpoint"] vineyard_ipc_socket = conf["vineyard_socket"] if sess.info["type"] == "k8s": hosts = [ "{}:{}".format(sess.info["namespace"], s) for s in sess.info["engine_hosts"].split(",") ] else: # type == "hosts" hosts = sess.info["engine_hosts"].split(",") vineyard.io.serialize( path, vineyard.ObjectID(self._vineyard_id), type="global", vineyard_ipc_socket=vineyard_ipc_socket, vineyard_endpoint=vineyard_endpoint, storage_options=kwargs, deployment=deployment, hosts=hosts, ) @classmethod def load_from(cls, path, sess, **kwargs): """Construct a `Graph` by deserialize from `path`. It will read all serialization files, which is dumped by `Graph.serialize`. If any serialize file doesn't exists or broken, will error out. Args: path (str): Path contains the serialization files. sess (`graphscope.Session`): The target session that the graph will be construct in Returns: `Graph`: A new graph object. Schema and data is supposed to be identical with the one that called serialized method. """ import vineyard import vineyard.io deployment = "kubernetes" if sess.info["type"] == "k8s" else "ssh" conf = sess.info["engine_config"] vineyard_endpoint = conf["vineyard_rpc_endpoint"] vineyard_ipc_socket = conf["vineyard_socket"] if sess.info["type"] == "k8s": hosts = [ "{}:{}".format(sess.info["namespace"], s) for s in sess.info["engine_hosts"].split(",") ] else: # type == "hosts" hosts = sess.info["engine_hosts"].split(",") graph_id = vineyard.io.deserialize( path, type="global", vineyard_ipc_socket=vineyard_ipc_socket, vineyard_endpoint=vineyard_endpoint, storage_options=kwargs, deployment=deployment, hosts=hosts, ) return cls(sess, vineyard.ObjectID(graph_id)) def draw(self, vertices, hop=1): """Visualize the graph data in the result cell when the draw functions are invoked Args: vertices (list): selected vertices. hop (int): draw induced subgraph with hop extension. Defaults to 1. Returns: A GraphModel. """ from ipygraphin import GraphModel self._ensure_loaded() interactive_query = self._session.gremlin(self) graph = GraphModel() graph.queryGraphData(vertices, hop, interactive_query) # listen on the 1~2 hops operation of node graph.on_msg(graph.queryNeighbor) return graph def _construct_graph( self, vertices, edges, v_labels, e_labels, e_relations, mutation_func=None ): """Construct graph. 1. Construct a graph from scratch. If the vertices and edges is empty, return a empty graph. 2. Construct a graph from existed builded graph. If the vertices and edges is empty, return a copied graph. Args: vertices ([type]): [description] edges ([type]): [description] v_labels ([type]): [description] e_labels ([type]): [description] e_relations ([type]): [description] mutation_func ([type], optional): [description]. Defaults to None. Returns: [type]: [description] """ config = graph_utils.assemble_op_config( vertices.values(), edges.values(), self._oid_type, self._directed, self._generate_eid, ) # edge case. 
if not vertices and not edges: if mutation_func: # Rely on `self._key` return Graph(self._session, self) else: return Graph( self._session, None, self._oid_type, self._directed, self._generate_eid, ) if mutation_func: op = mutation_func(self, attrs=config) else: op = dag_utils.create_graph( self.session_id, types_pb2.ARROW_PROPERTY, attrs=config ) graph = Graph( self._session, op, self._oid_type, self._directed, self._generate_eid ) graph._unsealed_vertices = vertices graph._unsealed_edges = edges graph._v_labels = v_labels graph._e_labels = e_labels graph._e_relationships = e_relations # propage info about whether is a loaded graph. # graph._key = self._key if mutation_func: graph._base_graph = self._base_graph or self return graph def add_vertices(self, vertices, label="_", properties=[], vid_field=0): is_from_existed_graph = len(self._unsealed_vertices) != len( self._v_labels ) or len(self._unsealed_edges) != len(self._e_labels) if label in self._v_labels: raise ValueError(f"Label {label} already existed in graph.") if not self._v_labels and self._e_labels: raise ValueError("Cannot manually add vertices after inferred vertices.") unsealed_vertices = deepcopy(self._unsealed_vertices) unsealed_vertices[label] = VertexLabel( label=label, loader=vertices, properties=properties, vid_field=vid_field ) v_labels = deepcopy(self._v_labels) v_labels.append(label) # Load after validity check and before create add_vertices op. # TODO(zsy): Add ability to add vertices and edges to existed graph simultaneously. if is_from_existed_graph and self._unsealed_edges: self._ensure_loaded() func = dag_utils.add_vertices if is_from_existed_graph else None return self._construct_graph( unsealed_vertices, self._unsealed_edges, v_labels, self._e_labels, self._e_relationships, func, ) def add_edges( self, edges, label="_", properties=[], src_label=None, dst_label=None, src_field=0, dst_field=1, ): """Add edges to graph. 1. Add edges to a uninitialized graph. i. src_label and dst_label both unspecified. In this case, current graph must has 0 (we deduce vertex label from edge table, and set vertex label name to '_'), or 1 vertex label (we set src_label and dst label to this). ii. src_label and dst_label both specified and existed in current graph's vertex labels. iii. src_label and dst_label both specified and there is no vertex labels in current graph. we deduce all vertex labels from edge tables. Note that you either provide all vertex labels, or let graphscope deduce all vertex labels. We don't support mixed style. 2. Add edges to a existed graph. Must add a new kind of edge label, not a new relation to builded graph. But you can add a new relation to uninitialized part of the graph. src_label and dst_label must be specified and existed in current graph. Args: edges ([type]): [description] label (str, optional): [description]. Defaults to "_". properties ([type], optional): [description]. Defaults to None. src_label ([type], optional): [description]. Defaults to None. dst_label ([type], optional): [description]. Defaults to None. src_field (int, optional): [description]. Defaults to 0. dst_field (int, optional): [description]. Defaults to 1. 
Raises: RuntimeError: [description] Returns: Graph: [description] """ is_from_existed_graph = len(self._unsealed_vertices) != len( self._v_labels ) or len(self._unsealed_edges) != len(self._e_labels) if is_from_existed_graph: if label in self._e_labels and label not in self._unsealed_edges: raise ValueError("Cannot add new relation to existed graph.") if src_label is None or dst_label is None: raise ValueError("src label and dst label cannot be None.") if src_label not in self._v_labels or dst_label not in self._v_labels: raise ValueError("src label or dst_label not existed in graph.") else: if src_label is None and dst_label is None: check_argument(len(self._v_labels) <= 1, "ambiguous vertex label") if len(self._v_labels) == 1: src_label = dst_label = self._v_labels[0] else: src_label = dst_label = "_" elif src_label is not None and dst_label is not None: if self._v_labels: if ( src_label not in self._v_labels or dst_label not in self._v_labels ): raise ValueError("src label or dst_label not existed in graph.") else: # Infer all v_labels from edge tables. pass else: raise ValueError( "src and dst label must be both specified or either unspecified." ) check_argument( src_field != dst_field, "src and dst field cannot refer to the same field" ) unsealed_edges = deepcopy(self._unsealed_edges) e_labels = deepcopy(self._e_labels) relations = deepcopy(self._e_relationships) if label in unsealed_edges: assert label in self._e_labels label_idx = self._e_labels.index(label) # Will check conflict in `add_sub_label` relations[label_idx].append((src_label, dst_label)) cur_label = unsealed_edges[label] else: e_labels.append(label) relations.append([(src_label, dst_label)]) cur_label = EdgeLabel(label) cur_label.add_sub_label( EdgeSubLabel(edges, properties, src_label, dst_label, src_field, dst_field) ) unsealed_edges[label] = cur_label # Load after validity check and before create add_vertices op. # TODO(zsy): Add ability to add vertices and edges to existed graph simultaneously. if is_from_existed_graph and self._unsealed_vertices: self._ensure_loaded() func = dag_utils.add_edges if is_from_existed_graph else None return self._construct_graph( self._unsealed_vertices, unsealed_edges, self._v_labels, e_labels, relations, func, ) def remove_vertices(self, label): if label not in self._v_labels: raise ValueError(f"label {label} not in vertices.") if label not in self._unsealed_vertices: raise ValueError( "Remove vertices from a loaded graph doesn't supported yet" ) # Check whether safe to remove for rel in self._e_relationships: for sub_rel in rel: if label in sub_rel: raise ValueError( f"Vertex {label} has usage in relation {sub_rel}, please remove that edge first." 
) unsealed_vertices = deepcopy(self._unsealed_vertices) v_labels = deepcopy(self._v_labels) unsealed_vertices.pop(label) v_labels.remove(label) return self._construct_graph( unsealed_vertices, self._unsealed_edges, v_labels, self._e_labels, self._e_relationships, ) def remove_edges(self, label, src_label=None, dst_label=None): if label not in self._e_labels: raise ValueError(f"label {label} not in edges") if label not in self._unsealed_edges: raise ValueError("Remove edges from a loaded graph doesn't supported yet") unsealed_edges = deepcopy(self._unsealed_edges) e_labels = deepcopy(self._e_labels) relations = deepcopy(self._e_relationships) # Calculate the items to remove remove_list = [] label_idx = e_labels.index(label) for rel in relations[label_idx]: for sub_rel in rel: if src_label is None or src_label == sub_rel[0]: if dst_label is None or dst_label == sub_rel[1]: remove_list.append(sub_rel) if not remove_list: raise ValueError("Cannot find edges to remove.") # Remove the edge label if src_label is None and dst_label is None: unsealed_edges.pop(label) e_labels.pop(label_idx) relations.pop(label_idx) else: cur_label = unsealed_edges[label] for sub_rel in remove_list: cur_label.sub_labels.pop(sub_rel) relations[label_idx].remove(sub_rel) # Remove entire label if no relations still exists. if not relations[label_idx]: unsealed_edges.pop(label) e_labels.pop(label_idx) relations.pop(label_idx) return self._construct_graph( self._unsealed_vertices, unsealed_edges, self._v_labels, e_labels, relations ) def g(incoming_data): return Graph(incoming_data=incoming_data)
[ "logging.getLogger", "vineyard.io.deserialize", "graphscope.framework.utils.normalize_data_type_str", "graphscope.proto.types_pb2.GraphType.Name", "copy.deepcopy", "ipygraphin.GraphModel", "graphscope.client.session.get_default_session", "graphscope.framework.utils.transform_vertex_range", "json.dumps", "graphscope.framework.dag_utils.graph_to_dataframe", "graphscope.framework.utils.decode_dataframe", "graphscope.framework.dag_utils.project_arrow_property_graph", "graphscope.framework.utils.transform_labeled_vertex_property_data_selector", "graphscope.framework.dag_utils.graph_to_numpy", "graphscope.framework.dag_utils.add_column", "graphscope.framework.utils.s_to_attr", "graphscope.framework.dag_utils.dynamic_to_arrow", "graphscope.framework.dag_utils.copy_graph", "graphscope.framework.graph_utils.VertexLabel", "graphscope.framework.graph_utils.EdgeSubLabel", "graphscope.framework.errors.check_argument", "graphscope.framework.graph_utils.EdgeLabel", "graphscope.framework.graph_schema.GraphSchema", "graphscope.framework.dag_utils.unload_graph", "threading.current_thread", "graphscope.framework.utils.b_to_attr", "graphscope.framework.dag_utils.create_graph", "vineyard.ObjectID", "graphscope.framework.utils.decode_numpy", "threading.Thread", "graphscope.framework.utils.data_type_to_cpp" ]
[((1480, 1511), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1497, 1511), False, 'import logging\n'), ((3314, 3327), 'graphscope.framework.graph_schema.GraphSchema', 'GraphSchema', ([], {}), '()\n', (3325, 3327), False, 'from graphscope.framework.graph_schema import GraphSchema\n'), ((3857, 3896), 'graphscope.framework.utils.normalize_data_type_str', 'utils.normalize_data_type_str', (['oid_type'], {}), '(oid_type)\n', (3886, 3896), False, 'from graphscope.framework import utils\n'), ((6481, 6570), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self._graph_type == graph_def.graph_type)', '"""Graph type doesn\'t match."""'], {}), '(self._graph_type == graph_def.graph_type,\n "Graph type doesn\'t match.")\n', (6495, 6570), False, 'from graphscope.framework.errors import check_argument\n'), ((9450, 9495), 'graphscope.framework.utils.normalize_data_type_str', 'utils.normalize_data_type_str', (['self._oid_type'], {}), '(self._oid_type)\n', (9479, 9495), False, 'from graphscope.framework import utils\n'), ((9558, 9605), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['self._schema.vdata_type'], {}), '(self._schema.vdata_type)\n', (9580, 9605), False, 'from graphscope.framework import utils\n'), ((9627, 9674), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['self._schema.edata_type'], {}), '(self._schema.edata_type)\n', (9649, 9674), False, 'from graphscope.framework import utils\n'), ((13959, 14018), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == types_pb2.ARROW_PROPERTY)\n', (13973, 14018), False, 'from graphscope.framework.errors import check_argument\n'), ((16457, 16595), 'graphscope.framework.dag_utils.project_arrow_property_graph', 'dag_utils.project_arrow_property_graph', (['self', 'v_label_id', 'v_prop_id', 'e_label_id', 'e_prop_id', 'vdata_type', 'edata_type', 'oid_type', 'vid_type'], {}), '(self, v_label_id, v_prop_id,\n e_label_id, e_prop_id, vdata_type, edata_type, oid_type, vid_type)\n', (16495, 16595), False, 'from graphscope.framework import dag_utils\n'), ((17345, 17404), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == types_pb2.ARROW_PROPERTY)\n', (17359, 17404), False, 'from graphscope.framework.errors import check_argument\n'), ((17575, 17595), 'json.dumps', 'json.dumps', (['selector'], {}), '(selector)\n', (17585, 17595), False, 'import json\n'), ((17609, 17654), 'graphscope.framework.dag_utils.add_column', 'dag_utils.add_column', (['self', 'results', 'selector'], {}), '(self, results, selector)\n', (17629, 17654), False, 'from graphscope.framework import dag_utils\n'), ((18045, 18104), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == types_pb2.ARROW_PROPERTY)\n', (18059, 18104), False, 'from graphscope.framework.errors import check_argument\n'), ((18187, 18256), 'graphscope.framework.utils.transform_labeled_vertex_property_data_selector', 'utils.transform_labeled_vertex_property_data_selector', (['self', 'selector'], {}), '(self, selector)\n', (18240, 18256), False, 'from graphscope.framework import utils\n'), ((18280, 18322), 'graphscope.framework.utils.transform_vertex_range', 'utils.transform_vertex_range', (['vertex_range'], {}), '(vertex_range)\n', (18308, 18322), False, 'from 
graphscope.framework import utils\n'), ((18336, 18390), 'graphscope.framework.dag_utils.graph_to_numpy', 'dag_utils.graph_to_numpy', (['self', 'selector', 'vertex_range'], {}), '(self, selector, vertex_range)\n', (18360, 18390), False, 'from graphscope.framework import dag_utils\n'), ((18430, 18453), 'graphscope.framework.utils.decode_numpy', 'utils.decode_numpy', (['ret'], {}), '(ret)\n', (18448, 18453), False, 'from graphscope.framework import utils\n'), ((18811, 18870), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == types_pb2.ARROW_PROPERTY)\n', (18825, 18870), False, 'from graphscope.framework.errors import check_argument\n'), ((19256, 19276), 'json.dumps', 'json.dumps', (['selector'], {}), '(selector)\n', (19266, 19276), False, 'import json\n'), ((19300, 19342), 'graphscope.framework.utils.transform_vertex_range', 'utils.transform_vertex_range', (['vertex_range'], {}), '(vertex_range)\n', (19328, 19342), False, 'from graphscope.framework import utils\n'), ((19357, 19415), 'graphscope.framework.dag_utils.graph_to_dataframe', 'dag_utils.graph_to_dataframe', (['self', 'selector', 'vertex_range'], {}), '(self, selector, vertex_range)\n', (19385, 19415), False, 'from graphscope.framework import dag_utils\n'), ((19455, 19482), 'graphscope.framework.utils.decode_dataframe', 'utils.decode_dataframe', (['ret'], {}), '(ret)\n', (19477, 19482), False, 'from graphscope.framework import utils\n'), ((19643, 19730), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.signature == self._saved_signature)', '"""Graph has been modified!"""'], {}), "(self.signature == self._saved_signature,\n 'Graph has been modified!')\n", (19657, 19730), False, 'from graphscope.framework.errors import check_argument\n'), ((20353, 20395), 'graphscope.framework.dag_utils.dynamic_to_arrow', 'dag_utils.dynamic_to_arrow', (['incoming_graph'], {}), '(incoming_graph)\n', (20379, 20395), False, 'from graphscope.framework import dag_utils\n'), ((20669, 20738), 'graphscope.framework.errors.check_argument', 'check_argument', (['(incoming_graph.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(incoming_graph.graph_type == types_pb2.ARROW_PROPERTY)\n', (20683, 20738), False, 'from graphscope.framework.errors import check_argument\n'), ((20802, 20838), 'graphscope.framework.dag_utils.copy_graph', 'dag_utils.copy_graph', (['incoming_graph'], {}), '(incoming_graph)\n', (20822, 20838), False, 'from graphscope.framework import dag_utils\n'), ((21709, 21730), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(True)'], {}), '(True)\n', (21724, 21730), False, 'from graphscope.framework import utils\n'), ((21993, 22019), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""int64_t"""'], {}), "('int64_t')\n", (22008, 22019), False, 'from graphscope.framework import utils\n'), ((22057, 22084), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (22072, 22084), False, 'from graphscope.framework import utils\n'), ((22100, 22179), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['self.session_id', 'types_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(self.session_id, types_pb2.ARROW_PROPERTY, attrs=config)\n', (22122, 22179), False, 'from graphscope.framework import dag_utils\n'), ((22321, 22342), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(True)'], {}), '(True)\n', (22336, 22342), False, 'from graphscope.framework 
import utils\n'), ((22609, 22635), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""int64_t"""'], {}), "('int64_t')\n", (22624, 22635), False, 'from graphscope.framework import utils\n'), ((22673, 22700), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (22688, 22700), False, 'from graphscope.framework import utils\n'), ((22716, 22795), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['self.session_id', 'types_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(self.session_id, types_pb2.ARROW_PROPERTY, attrs=config)\n', (22738, 22795), False, 'from graphscope.framework import dag_utils\n'), ((25941, 26132), 'vineyard.io.deserialize', 'vineyard.io.deserialize', (['path'], {'type': '"""global"""', 'vineyard_ipc_socket': 'vineyard_ipc_socket', 'vineyard_endpoint': 'vineyard_endpoint', 'storage_options': 'kwargs', 'deployment': 'deployment', 'hosts': 'hosts'}), "(path, type='global', vineyard_ipc_socket=\n vineyard_ipc_socket, vineyard_endpoint=vineyard_endpoint,\n storage_options=kwargs, deployment=deployment, hosts=hosts)\n", (25964, 26132), False, 'import vineyard\n'), ((26747, 26759), 'ipygraphin.GraphModel', 'GraphModel', ([], {}), '()\n', (26757, 26759), False, 'from ipygraphin import GraphModel\n'), ((29575, 29608), 'copy.deepcopy', 'deepcopy', (['self._unsealed_vertices'], {}), '(self._unsealed_vertices)\n', (29583, 29608), False, 'from copy import deepcopy\n'), ((29644, 29734), 'graphscope.framework.graph_utils.VertexLabel', 'VertexLabel', ([], {'label': 'label', 'loader': 'vertices', 'properties': 'properties', 'vid_field': 'vid_field'}), '(label=label, loader=vertices, properties=properties, vid_field=\n vid_field)\n', (29655, 29734), False, 'from graphscope.framework.graph_utils import VertexLabel\n'), ((29771, 29795), 'copy.deepcopy', 'deepcopy', (['self._v_labels'], {}), '(self._v_labels)\n', (29779, 29795), False, 'from copy import deepcopy\n'), ((33827, 33921), 'graphscope.framework.errors.check_argument', 'check_argument', (['(src_field != dst_field)', '"""src and dst field cannot refer to the same field"""'], {}), "(src_field != dst_field,\n 'src and dst field cannot refer to the same field')\n", (33841, 33921), False, 'from graphscope.framework.errors import check_argument\n'), ((33966, 33996), 'copy.deepcopy', 'deepcopy', (['self._unsealed_edges'], {}), '(self._unsealed_edges)\n', (33974, 33996), False, 'from copy import deepcopy\n'), ((34016, 34040), 'copy.deepcopy', 'deepcopy', (['self._e_labels'], {}), '(self._e_labels)\n', (34024, 34040), False, 'from copy import deepcopy\n'), ((34061, 34092), 'copy.deepcopy', 'deepcopy', (['self._e_relationships'], {}), '(self._e_relationships)\n', (34069, 34092), False, 'from copy import deepcopy\n'), ((35896, 35929), 'copy.deepcopy', 'deepcopy', (['self._unsealed_vertices'], {}), '(self._unsealed_vertices)\n', (35904, 35929), False, 'from copy import deepcopy\n'), ((35949, 35973), 'copy.deepcopy', 'deepcopy', (['self._v_labels'], {}), '(self._v_labels)\n', (35957, 35973), False, 'from copy import deepcopy\n'), ((36567, 36597), 'copy.deepcopy', 'deepcopy', (['self._unsealed_edges'], {}), '(self._unsealed_edges)\n', (36575, 36597), False, 'from copy import deepcopy\n'), ((36617, 36641), 'copy.deepcopy', 'deepcopy', (['self._e_labels'], {}), '(self._e_labels)\n', (36625, 36641), False, 'from copy import deepcopy\n'), ((36662, 36693), 'copy.deepcopy', 'deepcopy', (['self._e_relationships'], {}), '(self._e_relationships)\n', (36670, 36693), False, 
'from copy import deepcopy\n'), ((3378, 3399), 'graphscope.client.session.get_default_session', 'get_default_session', ([], {}), '()\n', (3397, 3399), False, 'from graphscope.client.session import get_default_session\n'), ((7269, 7341), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._launch_interactive_instance_impl', 'args': '()'}), '(target=self._launch_interactive_instance_impl, args=())\n', (7285, 7341), False, 'import threading\n'), ((13036, 13064), 'graphscope.framework.dag_utils.unload_graph', 'dag_utils.unload_graph', (['self'], {}), '(self)\n', (13058, 13064), False, 'from graphscope.framework import dag_utils\n'), ((19113, 19179), 'graphscope.framework.utils.transform_labeled_vertex_property_data_selector', 'utils.transform_labeled_vertex_property_data_selector', (['self', 'value'], {}), '(self, value)\n', (19166, 19179), False, 'from graphscope.framework import utils\n'), ((24431, 24467), 'vineyard.ObjectID', 'vineyard.ObjectID', (['self._vineyard_id'], {}), '(self._vineyard_id)\n', (24448, 24467), False, 'import vineyard\n'), ((26244, 26271), 'vineyard.ObjectID', 'vineyard.ObjectID', (['graph_id'], {}), '(graph_id)\n', (26261, 26271), False, 'import vineyard\n'), ((28449, 28528), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['self.session_id', 'types_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(self.session_id, types_pb2.ARROW_PROPERTY, attrs=config)\n', (28471, 28528), False, 'from graphscope.framework import dag_utils\n'), ((34515, 34531), 'graphscope.framework.graph_utils.EdgeLabel', 'EdgeLabel', (['label'], {}), '(label)\n', (34524, 34531), False, 'from graphscope.framework.graph_utils import EdgeLabel\n'), ((34577, 34652), 'graphscope.framework.graph_utils.EdgeSubLabel', 'EdgeSubLabel', (['edges', 'properties', 'src_label', 'dst_label', 'src_field', 'dst_field'], {}), '(edges, properties, src_label, dst_label, src_field, dst_field)\n', (34589, 34652), False, 'from graphscope.framework.graph_utils import EdgeSubLabel\n'), ((11603, 11645), 'graphscope.proto.types_pb2.GraphType.Name', 'types_pb2.GraphType.Name', (['self._graph_type'], {}), '(self._graph_type)\n', (11627, 11645), False, 'from graphscope.proto import types_pb2\n'), ((12491, 12517), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (12515, 12517), False, 'import threading\n')]
import os import pytest import graphscope @pytest.fixture(scope="session") def graphscope_session(): sess = graphscope.session(run_on_local=True, show_log=True, num_workers=1) sess.as_default() yield sess sess.close()
[ "pytest.fixture", "graphscope.session" ]
[((47, 78), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (61, 78), False, 'import pytest\n'), ((116, 183), 'graphscope.session', 'graphscope.session', ([], {'run_on_local': '(True)', 'show_log': '(True)', 'num_workers': '(1)'}), '(run_on_local=True, show_log=True, num_workers=1)\n', (134, 183), False, 'import graphscope\n')]
#!/usr/bin/env python # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pytest import graphscope.nx as nx @pytest.mark.usefixtures("graphscope_session") class BaseTestAttributeMixing: def setup_method(cls): G = nx.Graph() G.add_nodes_from([0, 1], fish='one') G.add_nodes_from([2, 3], fish='two') G.add_nodes_from([4], fish='red') G.add_nodes_from([5], fish='blue') G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) cls.G = G D = nx.DiGraph() D.add_nodes_from([0, 1], fish='one') D.add_nodes_from([2, 3], fish='two') D.add_nodes_from([4], fish='red') D.add_nodes_from([5], fish='blue') D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) cls.D = D S = nx.Graph() S.add_nodes_from([0, 1], fish='one') S.add_nodes_from([2, 3], fish='two') S.add_nodes_from([4], fish='red') S.add_nodes_from([5], fish='blue') S.add_edge(0, 0) S.add_edge(2, 2) cls.S = S @pytest.mark.usefixtures("graphscope_session") class BaseTestDegreeMixing: def setup_method(cls): cls.P4 = nx.path_graph(4) cls.D = nx.DiGraph() cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) cls.S = nx.Graph() cls.S.add_edges_from([(0, 0), (1, 1)])
[ "graphscope.nx.DiGraph", "pytest.mark.usefixtures", "graphscope.nx.Graph", "graphscope.nx.path_graph" ]
[((420, 465), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (443, 465), False, 'import pytest\n'), ((1347, 1392), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1370, 1392), False, 'import pytest\n'), ((536, 546), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (544, 546), True, 'import graphscope.nx as nx\n'), ((812, 824), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (822, 824), True, 'import graphscope.nx as nx\n'), ((1090, 1100), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1098, 1100), True, 'import graphscope.nx as nx\n'), ((1465, 1481), 'graphscope.nx.path_graph', 'nx.path_graph', (['(4)'], {}), '(4)\n', (1478, 1481), True, 'import graphscope.nx as nx\n'), ((1498, 1510), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (1508, 1510), True, 'import graphscope.nx as nx\n'), ((1590, 1600), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1598, 1600), True, 'import graphscope.nx as nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pytest from networkx.generators.tests.test_classic import TestGeneratorClassic import graphscope.nx as nx from graphscope.nx import is_isomorphic from graphscope.nx.utils.compat import with_graphscope_nx_context from graphscope.nx.utils.misc import edges_equal @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGeneratorClassic) class TestGeneratorClassic: @pytest.mark.skip(reason="FIXME: test take too much time.") def test_dorogovtsev_goltsev_mendes_graph(self): pass def test_ladder_graph(self): for i, G in [ (0, nx.empty_graph(0)), (1, nx.path_graph(2)), ]: assert is_isomorphic(nx.ladder_graph(i), G) pytest.raises(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph) g = nx.ladder_graph(2) mg = nx.ladder_graph(2, create_using=nx.MultiGraph) assert edges_equal(mg.edges(), g.edges())
[ "graphscope.nx.ladder_graph", "graphscope.nx.empty_graph", "pytest.mark.skip", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.raises", "pytest.mark.usefixtures", "graphscope.nx.path_graph" ]
[((672, 717), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (695, 717), False, 'import pytest\n'), ((719, 767), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGeneratorClassic'], {}), '(TestGeneratorClassic)\n', (745, 767), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((801, 859), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: test take too much time."""'}), "(reason='FIXME: test take too much time.')\n", (817, 859), False, 'import pytest\n'), ((1129, 1205), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.ladder_graph', '(2)'], {'create_using': 'nx.DiGraph'}), '(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph)\n', (1142, 1205), False, 'import pytest\n'), ((1219, 1237), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['(2)'], {}), '(2)\n', (1234, 1237), True, 'import graphscope.nx as nx\n'), ((1251, 1297), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['(2)'], {'create_using': 'nx.MultiGraph'}), '(2, create_using=nx.MultiGraph)\n', (1266, 1297), True, 'import graphscope.nx as nx\n'), ((998, 1015), 'graphscope.nx.empty_graph', 'nx.empty_graph', (['(0)'], {}), '(0)\n', (1012, 1015), True, 'import graphscope.nx as nx\n'), ((1034, 1050), 'graphscope.nx.path_graph', 'nx.path_graph', (['(2)'], {}), '(2)\n', (1047, 1050), True, 'import graphscope.nx as nx\n'), ((1097, 1115), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['i'], {}), '(i)\n', (1112, 1115), True, 'import graphscope.nx as nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import hashlib import json import logging import threading from abc import ABCMeta from abc import abstractmethod from copy import deepcopy from itertools import chain from typing import List from typing import Mapping from typing import Union try: import vineyard except ImportError: vineyard = None from graphscope.config import GSConfig as gs_config from graphscope.framework import dag_utils from graphscope.framework import graph_utils from graphscope.framework import utils from graphscope.framework.dag import DAGNode from graphscope.framework.errors import check_argument from graphscope.framework.graph_schema import GraphSchema from graphscope.framework.graph_utils import EdgeLabel from graphscope.framework.graph_utils import EdgeSubLabel from graphscope.framework.graph_utils import VertexLabel from graphscope.framework.operation import Operation from graphscope.framework.utils import data_type_to_cpp from graphscope.proto import attr_value_pb2 from graphscope.proto import graph_def_pb2 from graphscope.proto import types_pb2 logger = logging.getLogger("graphscope") class GraphInterface(metaclass=ABCMeta): """Base Class to derive GraphDAGNode and Graph""" def __init__(self): self._session = None @abstractmethod def add_column(self, results, selector): raise NotImplementedError @abstractmethod def add_vertices(self, vertices, label="_", properties=None, vid_field=0): raise NotImplementedError @abstractmethod def add_edges( self, edges, label="_", properties=None, src_label=None, dst_label=None, src_field=0, dst_field=1, ): raise NotImplementedError @abstractmethod def unload(self): raise NotImplementedError def to_numpy(self, selector, vertex_range=None): raise NotImplementedError def to_dataframe(self, selector, vertex_range=None): raise NotImplementedError def save_to(self, path, **kwargs): raise NotImplementedError def load_from(cls, path, sess, **kwargs): raise NotImplementedError @abstractmethod def project(self, vertices, edges): raise NotImplementedError def _from_nx_graph(self, g): """Create a gs graph from a nx graph. Args: g (:class:`graphscope.nx.graph`): A nx graph that contains graph data. Raises: RuntimeError: NX graph and gs graph not in the same session. TypeError: Convert a graph view of nx graph to gs graph. Returns: :class:`graphscope.framework.operation.Operation` that will be used to construct a :class:`graphscope.Graph` Examples: .. code:: python >>> import graphscope as gs >>> nx_g = gs.nx.path_graph(10) >>> gs_g = gs.Graph(nx_g) """ if self.session_id != g.session_id: raise RuntimeError( "networkx graph and graphscope graph not in the same session." ) if hasattr(g, "_graph"): raise TypeError("graph view can not convert to gs graph") return dag_utils.dynamic_to_arrow(g) def _from_vineyard(self, vineyard_object): """Load a graph from a already existed vineyard graph. 
        Args:
            vineyard_object (:class:`vineyard.Object`, :class:`vineyard.ObjectID`
                or :class:`vineyard.ObjectName`): vineyard object, which represents a graph.

        Returns:
            :class:`graphscope.framework.operation.Operation`
        """
        if isinstance(vineyard_object, vineyard.Object):
            return self._construct_op_from_vineyard_id(vineyard_object.id)
        if isinstance(vineyard_object, vineyard.ObjectID):
            return self._construct_op_from_vineyard_id(vineyard_object)
        if isinstance(vineyard_object, vineyard.ObjectName):
            return self._construct_op_from_vineyard_name(vineyard_object)

    def _construct_op_from_vineyard_id(self, vineyard_id):
        assert self._session is not None
        config = {}
        config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(True)
        config[types_pb2.VINEYARD_ID] = utils.i_to_attr(int(vineyard_id))
        # FIXME(hetao) hardcode oid/vid type for codegen, when loading from vineyard
        #
        # the metadata should be retrieved from vineyard
        config[types_pb2.OID_TYPE] = utils.s_to_attr("int64_t")
        config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t")
        return dag_utils.create_graph(
            self.session_id, graph_def_pb2.ARROW_PROPERTY, attrs=config
        )

    def _construct_op_from_vineyard_name(self, vineyard_name):
        assert self._session is not None
        config = {}
        config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(True)
        config[types_pb2.VINEYARD_NAME] = utils.s_to_attr(str(vineyard_name))
        # FIXME(hetao) hardcode oid/vid type for codegen, when loading from vineyard
        #
        # the metadata should be retrieved from vineyard
        config[types_pb2.OID_TYPE] = utils.s_to_attr("int64_t")
        config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t")
        return dag_utils.create_graph(
            self.session_id, graph_def_pb2.ARROW_PROPERTY, attrs=config
        )

    def _construct_op_of_empty_graph(self):
        config = {}
        config[types_pb2.ARROW_PROPERTY_DEFINITION] = attr_value_pb2.AttrValue()
        config[types_pb2.DIRECTED] = utils.b_to_attr(self._directed)
        config[types_pb2.GENERATE_EID] = utils.b_to_attr(self._generate_eid)
        config[types_pb2.OID_TYPE] = utils.s_to_attr(self._oid_type)
        config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t")
        config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(False)
        return dag_utils.create_graph(
            self.session_id, graph_def_pb2.ARROW_PROPERTY, inputs=None, attrs=config
        )


class GraphDAGNode(DAGNode, GraphInterface):
    """A class that represents a graph node in a DAG.

    In GraphScope, all operations that generate a new graph will return
    an instance of :class:`GraphDAGNode`, which will be automatically executed
    by :method:`sess.run` in `eager` mode.

    The following example demonstrates its usage:

    .. code:: python

        >>> # lazy mode
        >>> import graphscope as gs
        >>> sess = gs.session(mode="lazy")
        >>> g = sess.g()
        >>> g1 = g.add_vertices("person.csv","person")
        >>> print(g1) # <graphscope.framework.graph.GraphDAGNode object>
        >>> g2 = sess.run(g1)
        >>> print(g2) # <graphscope.framework.graph.Graph object>

        >>> # eager mode
        >>> import graphscope as gs
        >>> sess = gs.session(mode="eager")
        >>> g = sess.g()
        >>> g1 = g.add_vertices("person.csv","person")
        >>> print(g1) # <graphscope.framework.graph.Graph object>
        >>> g1.unload()
    """

    def __init__(
        self,
        session,
        incoming_data=None,
        oid_type="int64",
        directed=True,
        generate_eid=True,
    ):
        """Construct a :class:`GraphDAGNode` object.

        Args:
            session (:class:`Session`): A graphscope session instance.
            incoming_data: Graph can be initialized through various types of sources,
                which can be one of:

                    - :class:`graphscope.framework.operation.Operation`
                    - :class:`graphscope.nx.Graph`
                    - :class:`graphscope.Graph`
                    - :class:`vineyard.Object`, :class:`vineyard.ObjectId` or :class:`vineyard.ObjectName`

            oid_type: (str, optional): Type of vertex original id. Defaults to "int64".
            directed: (bool, optional): Directed graph or not. Defaults to True.
            generate_eid: (bool, optional): Generate id for each edge when set to True.
                Defaults to True.
        """
        super().__init__()
        self._session = session
        oid_type = utils.normalize_data_type_str(oid_type)
        if oid_type not in ("int64_t", "std::string"):
            raise ValueError("oid_type can only be int64_t or string.")
        self._oid_type = oid_type
        self._directed = directed
        self._generate_eid = generate_eid
        self._graph_type = graph_def_pb2.ARROW_PROPERTY
        # list of pair <parent_op_key, VertexLabel/EdgeLabel>
        self._unsealed_vertices_and_edges = list()
        # check for newly added vertices and edges.
        self._v_labels = list()
        self._e_labels = list()
        self._e_relationships = list()
        self._base_graph = None
        # add op to dag
        self._resolve_op(incoming_data)
        self._session.dag.add_op(self._op)

    @property
    def v_labels(self):
        return self._v_labels

    @v_labels.setter
    def v_labels(self, value):
        self._v_labels = value

    @property
    def e_labels(self):
        return self._e_labels

    @e_labels.setter
    def e_labels(self, value):
        self._e_labels = value

    @property
    def e_relationships(self):
        return self._e_relationships

    @e_relationships.setter
    def e_relationships(self, value):
        self._e_relationships = value

    @property
    def graph_type(self):
        """The type of the graph object.

        Returns:
            type (`types_pb2.GraphType`): the type of the graph.
        """
        return self._graph_type

    def _project_to_simple(self):
        check_argument(self.graph_type == graph_def_pb2.ARROW_PROPERTY)
        op = dag_utils.project_arrow_property_graph_to_simple(self)
        # construct dag node
        graph_dag_node = GraphDAGNode(self._session, op)
        graph_dag_node._base_graph = self
        return graph_dag_node

    def _resolve_op(self, incoming_data):
        # Don't import the :code:`NXGraph` in top-level statements to improve the
        # performance of :code:`import graphscope`.
        from graphscope import nx

        if incoming_data is None:
            # create dag node of empty graph
            self._op = self._construct_op_of_empty_graph()
        elif isinstance(incoming_data, Operation):
            self._op = incoming_data
            if self._op.type == types_pb2.PROJECT_TO_SIMPLE:
                self._graph_type = graph_def_pb2.ARROW_PROJECTED
        elif isinstance(incoming_data, nx.classes.graph._GraphBase):
            self._op = self._from_nx_graph(incoming_data)
        elif isinstance(incoming_data, Graph):
            self._op = dag_utils.copy_graph(incoming_data)
            self._graph_type = incoming_data.graph_type
        elif isinstance(incoming_data, GraphDAGNode):
            if incoming_data.session_id != self.session_id:
                raise RuntimeError("{0} not in the same session.".format(incoming_data))
            raise NotImplementedError
        elif vineyard is not None and isinstance(
            incoming_data, (vineyard.Object, vineyard.ObjectID, vineyard.ObjectName)
        ):
            self._op = self._from_vineyard(incoming_data)
        else:
            raise RuntimeError("Not supported incoming data.")

    def to_numpy(self, selector, vertex_range=None):
        """Select some elements of the graph and output to numpy.

        Args:
            selector (str): Select a portion of graph as a numpy.ndarray.
            vertex_range(dict, optional): Slice vertices. Defaults to None.

        Returns:
            :class:`graphscope.framework.context.ResultDAGNode`:
                A result holds the `numpy.ndarray`, evaluated in eager mode.
""" # avoid circular import from graphscope.framework.context import ResultDAGNode check_argument(self.graph_type == graph_def_pb2.ARROW_PROPERTY) vertex_range = utils.transform_vertex_range(vertex_range) op = dag_utils.graph_to_numpy(self, selector, vertex_range) return ResultDAGNode(self, op) def to_dataframe(self, selector, vertex_range=None): """Select some elements of the graph and output as a pandas.DataFrame Args: selector (dict): Select some portions of graph. vertex_range (dict, optional): Slice vertices. Defaults to None. Returns: :class:`graphscope.framework.context.ResultDAGNode`: A result holds the `pandas.DataFrame`, evaluated in eager mode. """ # avoid circular import from graphscope.framework.context import ResultDAGNode check_argument(self.graph_type == graph_def_pb2.ARROW_PROPERTY) check_argument( isinstance(selector, Mapping), "selector of to dataframe must be a dict", ) selector = json.dumps(selector) vertex_range = utils.transform_vertex_range(vertex_range) op = dag_utils.graph_to_dataframe(self, selector, vertex_range) return ResultDAGNode(self, op) def add_vertices(self, vertices, label="_", properties=None, vid_field=0): """Add vertices to the graph, and return a new graph. Args: vertices (Union[str, Loader]): Vertex data source. label (str, optional): Vertex label name. Defaults to "_". properties (list[str], optional): List of column names loaded as properties. Defaults to None. vid_field (int or str, optional): Column index or property name used as id field. Defaults to 0. Raises: ValueError: If the given value is invalid or conflict with current graph. Returns: :class:`graphscope.framework.graph.GraphDAGNode`: A new graph with vertex added, evaluated in eager mode. """ if label in self._v_labels: raise ValueError(f"Label {label} already existed in graph.") if not self._v_labels and self._e_labels: raise ValueError("Cannot manually add vertices after inferred vertices.") unsealed_vertices_and_edges = deepcopy(self._unsealed_vertices_and_edges) vertex_label = VertexLabel( label=label, loader=vertices, properties=properties, vid_field=vid_field, id_type=self._oid_type, session_id=self._session.session_id, ) unsealed_vertices_and_edges.append((self.op.key, vertex_label)) v_labels = deepcopy(self._v_labels) v_labels.append(label) # generate and add a loader op to dag loader_op = dag_utils.create_loader(vertex_label) self._session.dag.add_op(loader_op) # construct add label op op = dag_utils.add_labels_to_graph(self, loader_op) # construct dag node graph_dag_node = GraphDAGNode( self._session, op, self._oid_type, self._directed, self._generate_eid ) graph_dag_node._v_labels = v_labels graph_dag_node._e_labels = self._e_labels graph_dag_node._e_relationships = self._e_relationships graph_dag_node._unsealed_vertices_and_edges = unsealed_vertices_and_edges graph_dag_node._base_graph = self return graph_dag_node def add_edges( self, edges, label="_", properties=None, src_label=None, dst_label=None, src_field=0, dst_field=1, ): """Add edges to the graph, and return a new graph. 1. Add edges to a uninitialized graph. i. src_label and dst_label both unspecified. In this case, current graph must has 0 (we deduce vertex label from edge table, and set vertex label name to '_'), or 1 vertex label (we set src_label and dst label to this). ii. src_label and dst_label both specified and existed in current graph's vertex labels. iii. src_label and dst_label both specified and there is no vertex labels in current graph. we deduce all vertex labels from edge tables. 
Note that you either provide all vertex labels, or let graphscope deduce all vertex labels. We don't support mixed style. 2. Add edges to a existed graph. Must add a new kind of edge label, not a new relation to builded graph. But you can add a new relation to uninitialized part of the graph. src_label and dst_label must be specified and existed in current graph. Args: edges (Union[str, Loader]): Edge data source. label (str, optional): Edge label name. Defaults to "_". properties (list[str], optional): List of column names loaded as properties. Defaults to None. src_label (str, optional): Source vertex label. Defaults to None. dst_label (str, optional): Destination vertex label. Defaults to None. src_field (int, optional): Column index or name used as src field. Defaults to 0. dst_field (int, optional): Column index or name used as dst field. Defaults to 1. Raises: ValueError: If the given value is invalid or conflict with current graph. Returns: :class:`graphscope.framework.graph.GraphDAGNode`: A new graph with edge added, evaluated in eager mode. """ if src_label is None and dst_label is None: check_argument( len(self._v_labels) <= 1, "Ambiguous vertex label, please specify the src_label and dst_label.", ) if len(self._v_labels) == 1: src_label = dst_label = self._v_labels[0] else: src_label = dst_label = "_" if src_label is None or dst_label is None: raise ValueError( "src and dst label must be both specified or either unspecified." ) if self._v_labels: if src_label not in self._v_labels or dst_label not in self._v_labels: raise ValueError("src label or dst_label not existed in graph.") else: # We can infer all vertices label in the graph constructing stage. pass check_argument( src_field != dst_field, "src and dst field cannot refer to the same field" ) if self.evaluated: if label in self._e_labels: raise ValueError(f"Label {label} already existed in graph") unsealed_vertices = list() unsealed_edges = list() e_labels = deepcopy(self._e_labels) relations = deepcopy(self._e_relationships) parent = self if label in self.e_labels: # aggregate op with the same edge label fork = False unsealed_vertices_and_edges = list() for parent_op_key, unsealed_v_or_e in self._unsealed_vertices_and_edges: if ( isinstance(unsealed_v_or_e, EdgeLabel) and unsealed_v_or_e.label == label ): parent = self._backtrack_graph_dag_node_by_op_key(parent_op_key) cur_label = unsealed_v_or_e cur_label.add_sub_label( EdgeSubLabel( edges, properties, src_label, dst_label, src_field, dst_field, id_type=self._oid_type, ) ) fork = True else: unsealed_vertices_and_edges.append((parent_op_key, unsealed_v_or_e)) if fork: if isinstance(unsealed_v_or_e, VertexLabel): unsealed_vertices.append(unsealed_v_or_e) else: unsealed_edges.append(unsealed_v_or_e) unsealed_edges.append(cur_label) unsealed_vertices_and_edges.append((parent.op.key, cur_label)) else: unsealed_vertices_and_edges = deepcopy(self._unsealed_vertices_and_edges) e_labels.append(label) relations.append([(src_label, dst_label)]) cur_label = EdgeLabel(label, self._oid_type, self._session.session_id) cur_label.add_sub_label( EdgeSubLabel( edges, properties, src_label, dst_label, src_field, dst_field, id_type=self._oid_type, ) ) unsealed_edges.append(cur_label) unsealed_vertices_and_edges.append((parent.op.key, cur_label)) # generate and add a loader op to dag loader_op = dag_utils.create_loader(unsealed_vertices + unsealed_edges) self._session.dag.add_op(loader_op) # construct add label op op = dag_utils.add_labels_to_graph(parent, loader_op) # construct dag node 
graph_dag_node = GraphDAGNode( self._session, op, self._oid_type, self._directed, self._generate_eid ) graph_dag_node._v_labels = self._v_labels graph_dag_node._e_labels = e_labels graph_dag_node._e_relationships = relations graph_dag_node._unsealed_vertices_and_edges = unsealed_vertices_and_edges graph_dag_node._base_graph = parent return graph_dag_node def _backtrack_graph_dag_node_by_op_key(self, key): if self.op.key == key: return self graph_dag_node = self._base_graph while graph_dag_node is not None: if graph_dag_node.op.key == key: return graph_dag_node graph_dag_node = graph_dag_node._base_graph def add_column(self, results, selector): """Add the results as a column to the graph. Modification rules are given by the selector. Args: results: A instance of concrete class derive from (:class:`graphscope.framework.context.BaseContextDAGNode`): A context that created by doing an app query on a graph, and holds the corresponding results. selector (dict): Select results to add as column. Format is similar to selectors in :class:`graphscope.framework.context.Context` Returns: :class:`graphscope.framework.graph.GraphDAGNode`: A new graph with new columns, evaluated in eager mode. """ check_argument( isinstance(selector, Mapping), "selector of add column must be a dict" ) for key, value in selector.items(): results._check_selector(value) selector = json.dumps(selector) op = dag_utils.add_column(self, results, selector) graph_dag_node = GraphDAGNode(self._session, op) graph_dag_node._base_graph = self return graph_dag_node def unload(self): """Unload this graph from graphscope engine. Returns: :class:`graphscope.framework.graph.UnloadedGraph`: Evaluated in eager mode. """ op = dag_utils.unload_graph(self) return UnloadedGraph(self._session, op) def project( self, vertices: Mapping[str, Union[List[str], None]], edges: Mapping[str, Union[List[str], None]], ): """Project a subgraph from the property graph, and return a new graph. A graph produced by project just like a normal property graph, and can be projected further. Args: vertices (dict): key is the vertex label name, the value is a list of str, which represents the name of properties. Specifically, it will select all properties if value is None. Note that, the label of the vertex in all edges you want to project should be included. edges (dict): key is the edge label name, the value is a list of str, which represents the name of properties. Specifically, it will select all properties if value is None. Returns: :class:`graphscope.framework.graph.GraphDAGNode`: A new graph projected from the property graph, evaluated in eager mode. """ check_argument(self.graph_type == graph_def_pb2.ARROW_PROPERTY) op = dag_utils.project_arrow_property_graph( self, json.dumps(vertices), json.dumps(edges) ) # construct dag node graph_dag_node = GraphDAGNode(self._session, op) graph_dag_node._base_graph = self return graph_dag_node class Graph(GraphInterface): """A class for representing metadata of a graph in the GraphScope. A :class:`Graph` object holds the metadata of a graph, such as key, schema, and the graph is directed or not. It is worth noticing that the graph is stored by the backend such as Analytical Engine, Vineyard. In other words, the graph object holds nothing but metadata. The following example demonstrates its usage: .. 
code:: python >>> import graphscope as gs >>> sess = gs.session() >>> graph = sess.g() >>> graph = graph.add_vertices("person.csv","person") >>> graph = graph.add_vertices("software.csv", "software") >>> graph = graph.add_edges("knows.csv", "knows", src_label="person", dst_label="person") >>> graph = graph.add_edges("created.csv", "created", src_label="person", dst_label="software") >>> print(graph) >>> print(graph.schema) """ def __init__( self, graph_node, ): """Construct a :class:`Graph` object.""" self._graph_node = graph_node self._session = self._graph_node.session # copy and set op evaluated self._graph_node.op = deepcopy(self._graph_node.op) self._graph_node.evaluated = True self._session.dag.add_op(self._graph_node.op) self._key = None self._vineyard_id = 0 self._schema = GraphSchema() self._detached = False self._interactive_instance_launching_thread = None self._interactive_instance_list = [] self._learning_instance_list = [] def __del__(self): # cleanly ignore all exceptions, cause session may already closed / destroyed. try: self.unload() except Exception: # pylint: disable=broad-except pass def _close_interactive_instances(self): # Close related interactive instances when graph unloaded. # Since the graph is gone, quering via interactive client is meaningless. for instance in self._interactive_instance_list: instance.close() self._interactive_instance_list.clear() def _close_learning_instances(self): for instance in self._learning_instance_list: instance.close() self._learning_instance_list.clear() def _launch_interactive_instance_impl(self): try: self._session.gremlin(self) except: # noqa: E722 # Record error msg in `InteractiveQuery` when launching failed. # Unexpect and suppress all exceptions here. pass def update_from_graph_def(self, graph_def): check_argument( self._graph_node.graph_type == graph_def.graph_type, "Graph type doesn't match {} versus {}".format( self._graph_node.graph_type, graph_def.graph_type ), ) self._key = graph_def.key self._directed = graph_def.directed vy_info = graph_def_pb2.VineyardInfoPb() graph_def.extension.Unpack(vy_info) self._vineyard_id = vy_info.vineyard_id self._oid_type = data_type_to_cpp(vy_info.oid_type) self._generate_eid = vy_info.generate_eid self._schema_path = vy_info.schema_path self._schema.from_graph_def(graph_def) self._v_labels = self._schema.vertex_labels self._e_labels = self._schema.edge_labels self._e_relationships = self._schema.edge_relationships # init saved_signature (must be after init schema) self._saved_signature = self.signature # create gremlin server pod asynchronously if self._session.eager() and gs_config.initializing_interactive_engine: self._interactive_instance_launching_thread = threading.Thread( target=self._launch_interactive_instance_impl, args=() ) self._interactive_instance_launching_thread.start() def __getattr__(self, name): if hasattr(self._graph_node, name): return getattr(self._graph_node, name) else: raise AttributeError("{0} not found.".format(name)) @property def key(self): """The key of the corresponding graph in engine.""" return self._key @property def schema(self): """Schema of the graph. Returns: :class:`GraphSchema`: the schema of the graph """ return self._schema @property def schema_path(self): """Path that Coordinator will write interactive schema path to. Returns: str: The path contains the schema. for interactive engine. 
""" return self._schema_path @property def signature(self): return hashlib.sha256( "{}.{}".format(self._schema.signature(), self._key).encode("utf-8") ).hexdigest() @property def op(self): return self._graph_node.op @property def template_str(self): # transform str/string to std::string oid_type = utils.normalize_data_type_str(self._oid_type) vid_type = self._schema.vid_type vdata_type = utils.data_type_to_cpp(self._schema.vdata_type) edata_type = utils.data_type_to_cpp(self._schema.edata_type) if self._graph_type == graph_def_pb2.ARROW_PROPERTY: template = f"vineyard::ArrowFragment<{oid_type},{vid_type}>" elif self._graph_type == graph_def_pb2.ARROW_PROJECTED: template = f"gs::ArrowProjectedFragment<{oid_type},{vid_type},{vdata_type},{edata_type}>" elif self._graph_type == graph_def_pb2.DYNAMIC_PROJECTED: template = f"gs::DynamicProjectedFragment<{vdata_type},{edata_type}>" else: raise ValueError(f"Unsupported graph type: {self._graph_type}") return template @property def vineyard_id(self): """Get the vineyard object_id of this graph. Returns: str: return vineyard id of this graph """ return self._vineyard_id @property def session_id(self): """Get the currrent session_id. Returns: str: Return session id that the graph belongs to. """ return self._session.session_id def detach(self): """Detaching a graph makes it being left in vineyard even when the varaible for this :class:`Graph` object leaves the lexical scope. The graph can be accessed using the graph's :code:`ObjectID` or its name later. """ self._detached = True def loaded(self): return self._key is not None def __str__(self): v_str = "\n".join([f"VERTEX: {label}" for label in self._v_labels]) relations = [] for i in range(len(self._e_labels)): relations.extend( [(self._e_labels[i], src, dst) for src, dst in self._e_relationships[i]] ) e_str = "\n".join( [f"EDGE: {label}\tsrc: {src}\tdst: {dst}" for label, src, dst in relations] ) return f"graphscope.Graph\n{graph_def_pb2.GraphTypePb.Name(self._graph_type)}\n{v_str}\n{e_str}" def __repr__(self): return self.__str__() def unload(self): """Unload this graph from graphscope engine.""" if self._session is None: raise RuntimeError("The graph is not loaded") if self._key is None: self._session = None return # close interactive instances first try: if ( self._interactive_instance_launching_thread is not None and self._interactive_instance_launching_thread.is_alive() ): # join raises a RuntimeError if an attempt is made to join the current thread. # this exception occurs when a object collected by gc mechanism contains a running thread. if ( threading.current_thread() != self._interactive_instance_launching_thread ): self._interactive_instance_launching_thread.join() self._close_interactive_instances() except Exception as e: logger.error("Failed to close interactive instances: %s" % e) try: self._close_learning_instances() except Exception as e: logger.error("Failed to close learning instances: %s" % e) rlt = None if not self._detached: rlt = self._session._wrapper(self._graph_node.unload()) self._key = None self._session = None return rlt def _project_to_simple(self): return self._session._wrapper(self._graph_node._project_to_simple()) def add_column(self, results, selector): return self._session._wrapper(self._graph_node.add_column(results, selector)) def to_numpy(self, selector, vertex_range=None): """Select some elements of the graph and output to numpy. Args: selector (str): Select a portion of graph as a numpy.ndarray. vertex_range(dict, optional): Slice vertices. Defaults to None. 
Returns: `numpy.ndarray` """ self._check_unmodified() return self._session._wrapper(self._graph_node.to_numpy(selector, vertex_range)) def to_dataframe(self, selector, vertex_range=None): """Select some elements of the graph and output as a pandas.DataFrame Args: selector (dict): Select some portions of graph. vertex_range (dict, optional): Slice vertices. Defaults to None. Returns: `pandas.DataFrame` """ self._check_unmodified() return self._session._wrapper( self._graph_node.to_dataframe(selector, vertex_range) ) def is_directed(self): return self._directed def _check_unmodified(self): check_argument( self.signature == self._saved_signature, "Graph has been modified!" ) def _attach_interactive_instance(self, instance): """Store the instance when a new interactive instance is started. Args: instance: interactive instance """ self._interactive_instance_list.append(instance) def _attach_learning_instance(self, instance): """Store the instance when a new learning instance is created. Args: instance: learning instance """ self._learning_instance_list.append(instance) def save_to(self, path, **kwargs): """Serialize graph to a location. The meta and data of graph is dumped to specified location, and can be restored by `Graph.deserialize` in other sessions. Each worker will write a `path_{worker_id}.meta` file and a `path_{worker_id}` file to storage. Args: path (str): supported storages are local, hdfs, oss, s3 """ try: import vineyard import vineyard.io except ImportError: raise RuntimeError( "Saving context to locations requires 'vineyard', " "please install those two dependencies via " "\n" "\n" " pip3 install vineyard vineyard-io" "\n" "\n" ) sess = self._session deployment = "kubernetes" if sess.info["type"] == "k8s" else "ssh" conf = sess.info["engine_config"] vineyard_endpoint = conf["vineyard_rpc_endpoint"] vineyard_ipc_socket = conf["vineyard_socket"] if sess.info["type"] == "k8s": hosts = [ "{}:{}".format(sess.info["namespace"], s) for s in sess.info["engine_hosts"].split(",") ] else: # type == "hosts" hosts = sess.info["engine_hosts"].split(",") vineyard.io.serialize( path, vineyard.ObjectID(self._vineyard_id), type="global", vineyard_ipc_socket=vineyard_ipc_socket, vineyard_endpoint=vineyard_endpoint, storage_options=kwargs, deployment=deployment, hosts=hosts, ) @classmethod def load_from(cls, path, sess, **kwargs): """Construct a `Graph` by deserialize from `path`. It will read all serialization files, which is dumped by `Graph.serialize`. If any serialize file doesn't exists or broken, will error out. Args: path (str): Path contains the serialization files. sess (`graphscope.Session`): The target session that the graph will be construct in Returns: `Graph`: A new graph object. Schema and data is supposed to be identical with the one that called serialized method. 
""" try: import vineyard import vineyard.io except ImportError: raise RuntimeError( "Saving context to locations requires 'vineyard', " "please install those two dependencies via " "\n" "\n" " pip3 install vineyard vineyard-io" "\n" "\n" ) deployment = "kubernetes" if sess.info["type"] == "k8s" else "ssh" conf = sess.info["engine_config"] vineyard_endpoint = conf["vineyard_rpc_endpoint"] vineyard_ipc_socket = conf["vineyard_socket"] if sess.info["type"] == "k8s": hosts = [ "{}:{}".format(sess.info["namespace"], s) for s in sess.info["engine_hosts"].split(",") ] else: # type == "hosts" hosts = sess.info["engine_hosts"].split(",") graph_id = vineyard.io.deserialize( path, type="global", vineyard_ipc_socket=vineyard_ipc_socket, vineyard_endpoint=vineyard_endpoint, storage_options=kwargs, deployment=deployment, hosts=hosts, ) return sess._wrapper(GraphDAGNode(sess, vineyard.ObjectID(graph_id))) def add_vertices(self, vertices, label="_", properties=None, vid_field=0): if not self.loaded(): raise RuntimeError("The graph is not loaded") return self._session._wrapper( self._graph_node.add_vertices(vertices, label, properties, vid_field) ) def add_edges( self, edges, label="_", properties=None, src_label=None, dst_label=None, src_field=0, dst_field=1, ): if not self.loaded(): raise RuntimeError("The graph is not loaded") return self._session._wrapper( self._graph_node.add_edges( edges, label, properties, src_label, dst_label, src_field, dst_field ) ) def project( self, vertices: Mapping[str, Union[List[str], None]], edges: Mapping[str, Union[List[str], None]], ): if not self.loaded(): raise RuntimeError("The graph is not loaded") return self._session._wrapper(self._graph_node.project(vertices, edges)) class UnloadedGraph(DAGNode): """Unloaded graph node in a DAG.""" def __init__(self, session, op): self._session = session self._op = op # add op to dag self._session.dag.add_op(self._op)
[
 "logging.getLogger",
 "vineyard.io.deserialize",
 "graphscope.framework.utils.normalize_data_type_str",
 "graphscope.proto.graph_def_pb2.VineyardInfoPb",
 "graphscope.proto.graph_def_pb2.GraphTypePb.Name",
 "copy.deepcopy",
 "graphscope.framework.utils.transform_vertex_range",
 "json.dumps",
 "graphscope.framework.dag_utils.graph_to_dataframe",
 "graphscope.framework.dag_utils.graph_to_numpy",
 "graphscope.framework.dag_utils.add_column",
 "graphscope.framework.utils.s_to_attr",
 "graphscope.framework.dag_utils.dynamic_to_arrow",
 "graphscope.framework.dag_utils.create_loader",
 "graphscope.framework.dag_utils.copy_graph",
 "graphscope.framework.graph_utils.VertexLabel",
 "graphscope.framework.graph_utils.EdgeSubLabel",
 "graphscope.framework.errors.check_argument",
 "graphscope.framework.graph_utils.EdgeLabel",
 "graphscope.framework.dag_utils.add_labels_to_graph",
 "graphscope.framework.graph_schema.GraphSchema",
 "graphscope.framework.dag_utils.project_arrow_property_graph_to_simple",
 "graphscope.framework.dag_utils.unload_graph",
 "threading.current_thread",
 "graphscope.proto.attr_value_pb2.AttrValue",
 "graphscope.framework.utils.b_to_attr",
 "graphscope.framework.context.ResultDAGNode",
 "graphscope.framework.dag_utils.create_graph",
 "vineyard.ObjectID",
 "threading.Thread",
 "graphscope.framework.utils.data_type_to_cpp"
]
[((1730, 1761), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1747, 1761), False, 'import logging\n'), ((3839, 3868), 'graphscope.framework.dag_utils.dynamic_to_arrow', 'dag_utils.dynamic_to_arrow', (['g'], {}), '(g)\n', (3865, 3868), False, 'from graphscope.framework import dag_utils\n'), ((4837, 4858), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(True)'], {}), '(True)\n', (4852, 4858), False, 'from graphscope.framework import utils\n'), ((5121, 5147), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""int64_t"""'], {}), "('int64_t')\n", (5136, 5147), False, 'from graphscope.framework import utils\n'), ((5185, 5212), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (5200, 5212), False, 'from graphscope.framework import utils\n'), ((5228, 5316), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['self.session_id', 'graph_def_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(self.session_id, graph_def_pb2.ARROW_PROPERTY, attrs\n =config)\n', (5250, 5316), False, 'from graphscope.framework import dag_utils\n'), ((5507, 5528), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(True)'], {}), '(True)\n', (5522, 5528), False, 'from graphscope.framework import utils\n'), ((5795, 5821), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""int64_t"""'], {}), "('int64_t')\n", (5810, 5821), False, 'from graphscope.framework import utils\n'), ((5859, 5886), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (5874, 5886), False, 'from graphscope.framework import utils\n'), ((5902, 5990), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['self.session_id', 'graph_def_pb2.ARROW_PROPERTY'], {'attrs': 'config'}), '(self.session_id, graph_def_pb2.ARROW_PROPERTY, attrs\n =config)\n', (5924, 5990), False, 'from graphscope.framework import dag_utils\n'), ((6127, 6153), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {}), '()\n', (6151, 6153), False, 'from graphscope.proto import attr_value_pb2\n'), ((6191, 6222), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['self._directed'], {}), '(self._directed)\n', (6206, 6222), False, 'from graphscope.framework import utils\n'), ((6264, 6299), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['self._generate_eid'], {}), '(self._generate_eid)\n', (6279, 6299), False, 'from graphscope.framework import utils\n'), ((6337, 6368), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self._oid_type'], {}), '(self._oid_type)\n', (6352, 6368), False, 'from graphscope.framework import utils\n'), ((6406, 6433), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (6421, 6433), False, 'from graphscope.framework import utils\n'), ((6482, 6504), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(False)'], {}), '(False)\n', (6497, 6504), False, 'from graphscope.framework import utils\n'), ((6520, 6620), 'graphscope.framework.dag_utils.create_graph', 'dag_utils.create_graph', (['self.session_id', 'graph_def_pb2.ARROW_PROPERTY'], {'inputs': 'None', 'attrs': 'config'}), '(self.session_id, graph_def_pb2.ARROW_PROPERTY,\n inputs=None, attrs=config)\n', (6542, 6620), False, 'from graphscope.framework import dag_utils\n'), ((8705, 8744), 'graphscope.framework.utils.normalize_data_type_str', 
'utils.normalize_data_type_str', (['oid_type'], {}), '(oid_type)\n', (8734, 8744), False, 'from graphscope.framework import utils\n'), ((10191, 10254), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == graph_def_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == graph_def_pb2.ARROW_PROPERTY)\n', (10205, 10254), False, 'from graphscope.framework.errors import check_argument\n'), ((10268, 10322), 'graphscope.framework.dag_utils.project_arrow_property_graph_to_simple', 'dag_utils.project_arrow_property_graph_to_simple', (['self'], {}), '(self)\n', (10316, 10322), False, 'from graphscope.framework import dag_utils\n'), ((12417, 12480), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == graph_def_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == graph_def_pb2.ARROW_PROPERTY)\n', (12431, 12480), False, 'from graphscope.framework.errors import check_argument\n'), ((12504, 12546), 'graphscope.framework.utils.transform_vertex_range', 'utils.transform_vertex_range', (['vertex_range'], {}), '(vertex_range)\n', (12532, 12546), False, 'from graphscope.framework import utils\n'), ((12560, 12614), 'graphscope.framework.dag_utils.graph_to_numpy', 'dag_utils.graph_to_numpy', (['self', 'selector', 'vertex_range'], {}), '(self, selector, vertex_range)\n', (12584, 12614), False, 'from graphscope.framework import dag_utils\n'), ((12630, 12653), 'graphscope.framework.context.ResultDAGNode', 'ResultDAGNode', (['self', 'op'], {}), '(self, op)\n', (12643, 12653), False, 'from graphscope.framework.context import ResultDAGNode\n'), ((13221, 13284), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == graph_def_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == graph_def_pb2.ARROW_PROPERTY)\n', (13235, 13284), False, 'from graphscope.framework.errors import check_argument\n'), ((13436, 13456), 'json.dumps', 'json.dumps', (['selector'], {}), '(selector)\n', (13446, 13456), False, 'import json\n'), ((13480, 13522), 'graphscope.framework.utils.transform_vertex_range', 'utils.transform_vertex_range', (['vertex_range'], {}), '(vertex_range)\n', (13508, 13522), False, 'from graphscope.framework import utils\n'), ((13536, 13594), 'graphscope.framework.dag_utils.graph_to_dataframe', 'dag_utils.graph_to_dataframe', (['self', 'selector', 'vertex_range'], {}), '(self, selector, vertex_range)\n', (13564, 13594), False, 'from graphscope.framework import dag_utils\n'), ((13610, 13633), 'graphscope.framework.context.ResultDAGNode', 'ResultDAGNode', (['self', 'op'], {}), '(self, op)\n', (13623, 13633), False, 'from graphscope.framework.context import ResultDAGNode\n'), ((14691, 14734), 'copy.deepcopy', 'deepcopy', (['self._unsealed_vertices_and_edges'], {}), '(self._unsealed_vertices_and_edges)\n', (14699, 14734), False, 'from copy import deepcopy\n'), ((14758, 14909), 'graphscope.framework.graph_utils.VertexLabel', 'VertexLabel', ([], {'label': 'label', 'loader': 'vertices', 'properties': 'properties', 'vid_field': 'vid_field', 'id_type': 'self._oid_type', 'session_id': 'self._session.session_id'}), '(label=label, loader=vertices, properties=properties, vid_field=\n vid_field, id_type=self._oid_type, session_id=self._session.session_id)\n', (14769, 14909), False, 'from graphscope.framework.graph_utils import VertexLabel\n'), ((15079, 15103), 'copy.deepcopy', 'deepcopy', (['self._v_labels'], {}), '(self._v_labels)\n', (15087, 15103), False, 'from copy import deepcopy\n'), ((15201, 15238), 
'graphscope.framework.dag_utils.create_loader', 'dag_utils.create_loader', (['vertex_label'], {}), '(vertex_label)\n', (15224, 15238), False, 'from graphscope.framework import dag_utils\n'), ((15329, 15375), 'graphscope.framework.dag_utils.add_labels_to_graph', 'dag_utils.add_labels_to_graph', (['self', 'loader_op'], {}), '(self, loader_op)\n', (15358, 15375), False, 'from graphscope.framework import dag_utils\n'), ((18864, 18958), 'graphscope.framework.errors.check_argument', 'check_argument', (['(src_field != dst_field)', '"""src and dst field cannot refer to the same field"""'], {}), "(src_field != dst_field,\n 'src and dst field cannot refer to the same field')\n", (18878, 18958), False, 'from graphscope.framework.errors import check_argument\n'), ((19209, 19233), 'copy.deepcopy', 'deepcopy', (['self._e_labels'], {}), '(self._e_labels)\n', (19217, 19233), False, 'from copy import deepcopy\n'), ((19254, 19285), 'copy.deepcopy', 'deepcopy', (['self._e_relationships'], {}), '(self._e_relationships)\n', (19262, 19285), False, 'from copy import deepcopy\n'), ((21569, 21628), 'graphscope.framework.dag_utils.create_loader', 'dag_utils.create_loader', (['(unsealed_vertices + unsealed_edges)'], {}), '(unsealed_vertices + unsealed_edges)\n', (21592, 21628), False, 'from graphscope.framework import dag_utils\n'), ((21719, 21767), 'graphscope.framework.dag_utils.add_labels_to_graph', 'dag_utils.add_labels_to_graph', (['parent', 'loader_op'], {}), '(parent, loader_op)\n', (21748, 21767), False, 'from graphscope.framework import dag_utils\n'), ((23501, 23521), 'json.dumps', 'json.dumps', (['selector'], {}), '(selector)\n', (23511, 23521), False, 'import json\n'), ((23535, 23580), 'graphscope.framework.dag_utils.add_column', 'dag_utils.add_column', (['self', 'results', 'selector'], {}), '(self, results, selector)\n', (23555, 23580), False, 'from graphscope.framework import dag_utils\n'), ((23917, 23945), 'graphscope.framework.dag_utils.unload_graph', 'dag_utils.unload_graph', (['self'], {}), '(self)\n', (23939, 23945), False, 'from graphscope.framework import dag_utils\n'), ((25068, 25131), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.graph_type == graph_def_pb2.ARROW_PROPERTY)'], {}), '(self.graph_type == graph_def_pb2.ARROW_PROPERTY)\n', (25082, 25131), False, 'from graphscope.framework.errors import check_argument\n'), ((26626, 26655), 'copy.deepcopy', 'deepcopy', (['self._graph_node.op'], {}), '(self._graph_node.op)\n', (26634, 26655), False, 'from copy import deepcopy\n'), ((26831, 26844), 'graphscope.framework.graph_schema.GraphSchema', 'GraphSchema', ([], {}), '()\n', (26842, 26844), False, 'from graphscope.framework.graph_schema import GraphSchema\n'), ((28414, 28444), 'graphscope.proto.graph_def_pb2.VineyardInfoPb', 'graph_def_pb2.VineyardInfoPb', ([], {}), '()\n', (28442, 28444), False, 'from graphscope.proto import graph_def_pb2\n'), ((28562, 28596), 'graphscope.framework.utils.data_type_to_cpp', 'data_type_to_cpp', (['vy_info.oid_type'], {}), '(vy_info.oid_type)\n', (28578, 28596), False, 'from graphscope.framework.utils import data_type_to_cpp\n'), ((30479, 30524), 'graphscope.framework.utils.normalize_data_type_str', 'utils.normalize_data_type_str', (['self._oid_type'], {}), '(self._oid_type)\n', (30508, 30524), False, 'from graphscope.framework import utils\n'), ((30587, 30634), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['self._schema.vdata_type'], {}), '(self._schema.vdata_type)\n', (30609, 30634), False, 'from 
graphscope.framework import utils\n'), ((30656, 30703), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['self._schema.edata_type'], {}), '(self._schema.edata_type)\n', (30678, 30703), False, 'from graphscope.framework import utils\n'), ((35365, 35452), 'graphscope.framework.errors.check_argument', 'check_argument', (['(self.signature == self._saved_signature)', '"""Graph has been modified!"""'], {}), "(self.signature == self._saved_signature,\n 'Graph has been modified!')\n", (35379, 35452), False, 'from graphscope.framework.errors import check_argument\n'), ((39292, 39483), 'vineyard.io.deserialize', 'vineyard.io.deserialize', (['path'], {'type': '"""global"""', 'vineyard_ipc_socket': 'vineyard_ipc_socket', 'vineyard_endpoint': 'vineyard_endpoint', 'storage_options': 'kwargs', 'deployment': 'deployment', 'hosts': 'hosts'}), "(path, type='global', vineyard_ipc_socket=\n vineyard_ipc_socket, vineyard_endpoint=vineyard_endpoint,\n storage_options=kwargs, deployment=deployment, hosts=hosts)\n", (39315, 39483), False, 'import vineyard\n'), ((20840, 20883), 'copy.deepcopy', 'deepcopy', (['self._unsealed_vertices_and_edges'], {}), '(self._unsealed_vertices_and_edges)\n', (20848, 20883), False, 'from copy import deepcopy\n'), ((20998, 21056), 'graphscope.framework.graph_utils.EdgeLabel', 'EdgeLabel', (['label', 'self._oid_type', 'self._session.session_id'], {}), '(label, self._oid_type, self._session.session_id)\n', (21007, 21056), False, 'from graphscope.framework.graph_utils import EdgeLabel\n'), ((25203, 25223), 'json.dumps', 'json.dumps', (['vertices'], {}), '(vertices)\n', (25213, 25223), False, 'import json\n'), ((25225, 25242), 'json.dumps', 'json.dumps', (['edges'], {}), '(edges)\n', (25235, 25242), False, 'import json\n'), ((29204, 29276), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._launch_interactive_instance_impl', 'args': '()'}), '(target=self._launch_interactive_instance_impl, args=())\n', (29220, 29276), False, 'import threading\n'), ((37418, 37454), 'vineyard.ObjectID', 'vineyard.ObjectID', (['self._vineyard_id'], {}), '(self._vineyard_id)\n', (37435, 37454), False, 'import vineyard\n'), ((21110, 21213), 'graphscope.framework.graph_utils.EdgeSubLabel', 'EdgeSubLabel', (['edges', 'properties', 'src_label', 'dst_label', 'src_field', 'dst_field'], {'id_type': 'self._oid_type'}), '(edges, properties, src_label, dst_label, src_field, dst_field,\n id_type=self._oid_type)\n', (21122, 21213), False, 'from graphscope.framework.graph_utils import EdgeSubLabel\n'), ((32513, 32561), 'graphscope.proto.graph_def_pb2.GraphTypePb.Name', 'graph_def_pb2.GraphTypePb.Name', (['self._graph_type'], {}), '(self._graph_type)\n', (32543, 32561), False, 'from graphscope.proto import graph_def_pb2\n'), ((39618, 39645), 'vineyard.ObjectID', 'vineyard.ObjectID', (['graph_id'], {}), '(graph_id)\n', (39635, 39645), False, 'import vineyard\n'), ((33371, 33397), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (33395, 33397), False, 'import threading\n'), ((11242, 11277), 'graphscope.framework.dag_utils.copy_graph', 'dag_utils.copy_graph', (['incoming_data'], {}), '(incoming_data)\n', (11262, 11277), False, 'from graphscope.framework import dag_utils\n'), ((19911, 20014), 'graphscope.framework.graph_utils.EdgeSubLabel', 'EdgeSubLabel', (['edges', 'properties', 'src_label', 'dst_label', 'src_field', 'dst_field'], {'id_type': 'self._oid_type'}), '(edges, properties, src_label, dst_label, src_field, dst_field,\n id_type=self._oid_type)\n', (19923, 
20014), False, 'from graphscope.framework.graph_utils import EdgeSubLabel\n')]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file adjlist.py is referred and derived from project NetworkX,
#
#  https://github.com/networkx/networkx/blob/master/networkx/readwrite/adjlist.py
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#

import networkx.readwrite.adjlist
from networkx.readwrite.adjlist import parse_adjlist as _parse_adjlist
from networkx.readwrite.adjlist import read_adjlist as _read_adjlist
from networkx.utils.decorators import open_file

from graphscope.experimental import nx
from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
from graphscope.experimental.nx.utils.compat import patch_docstring

import_as_graphscope_nx(networkx.readwrite.adjlist)


@patch_docstring(_parse_adjlist)
def parse_adjlist(
    lines, comments="#", delimiter=None, create_using=None, nodetype=None
):
    G = nx.empty_graph(0, create_using)
    edges = []
    for line in lines:
        p = line.find(comments)
        if p >= 0:
            line = line[:p]
        if not len(line):
            continue
        vlist = line.strip().split(delimiter)
        u = vlist.pop(0)
        # convert types
        if nodetype is not None:
            try:
                u = nodetype(u)
            except Exception as e:
                raise TypeError(
                    "Failed to convert node ({}) to type {}".format(u, nodetype)
                ) from e
        if nodetype is not None:
            try:
                vlist = map(nodetype, vlist)
            except Exception as e:
                raise TypeError(
                    "Failed to convert nodes ({}) to type {}".format(
                        ",".join(vlist), nodetype
                    )
                ) from e
        edges.extend([u, v] for v in vlist)
    G.add_edges_from(edges)
    return G


@open_file(0, mode="rb")
@patch_docstring(_read_adjlist)
def read_adjlist(
    path,
    comments="#",
    delimiter=None,
    create_using=None,
    nodetype=None,
    encoding="utf-8",
):
    lines = (line.decode(encoding) for line in path)
    return parse_adjlist(
        lines,
        comments=comments,
        delimiter=delimiter,
        create_using=create_using,
        nodetype=nodetype,
    )


# fixture for pytest
def teardown_module(module):
    import os

    for fname in ["test.adjlist", "test.adjlist.gz"]:
        if os.path.isfile(fname):
            os.unlink(fname)
[
 "graphscope.experimental.nx.utils.compat.patch_docstring",
 "graphscope.experimental.nx.empty_graph",
 "os.path.isfile",
 "graphscope.experimental.nx.utils.compat.import_as_graphscope_nx",
 "os.unlink",
 "networkx.utils.decorators.open_file"
]
[((903, 954), 'graphscope.experimental.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.readwrite.adjlist'], {}), '(networkx.readwrite.adjlist)\n', (926, 954), False, 'from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx\n'), ((958, 989), 'graphscope.experimental.nx.utils.compat.patch_docstring', 'patch_docstring', (['_parse_adjlist'], {}), '(_parse_adjlist)\n', (973, 989), False, 'from graphscope.experimental.nx.utils.compat import patch_docstring\n'), ((2059, 2082), 'networkx.utils.decorators.open_file', 'open_file', (['(0)'], {'mode': '"""rb"""'}), "(0, mode='rb')\n", (2068, 2082), False, 'from networkx.utils.decorators import open_file\n'), ((2084, 2114), 'graphscope.experimental.nx.utils.compat.patch_docstring', 'patch_docstring', (['_read_adjlist'], {}), '(_read_adjlist)\n', (2099, 2114), False, 'from graphscope.experimental.nx.utils.compat import patch_docstring\n'), ((1094, 1125), 'graphscope.experimental.nx.empty_graph', 'nx.empty_graph', (['(0)', 'create_using'], {}), '(0, create_using)\n', (1108, 1125), False, 'from graphscope.experimental import nx\n'), ((2598, 2619), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (2612, 2619), False, 'import os\n'), ((2633, 2649), 'os.unlink', 'os.unlink', (['fname'], {}), '(fname)\n', (2642, 2649), False, 'import os\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import functools import hashlib import json import logging import os import zipfile from copy import deepcopy from io import BytesIO import yaml from graphscope.framework.context import create_context_node from graphscope.framework.dag import DAGNode from graphscope.framework.dag_utils import bind_app from graphscope.framework.dag_utils import create_app from graphscope.framework.dag_utils import unload_app from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.errors import check_argument from graphscope.framework.utils import graph_type_to_cpp_class from graphscope.proto import graph_def_pb2 logger = logging.getLogger("graphscope") DEFAULT_GS_CONFIG_FILE = ".gs_conf.yaml" def project_to_simple(func): @functools.wraps(func) def wrapper(*args, **kwargs): graph = args[0] if not hasattr(graph, "graph_type"): raise InvalidArgumentError("Missing graph_type attribute in graph object.") if graph.graph_type == graph_def_pb2.ARROW_PROPERTY: graph = graph._project_to_simple() return func(graph, *args[1:], **kwargs) return wrapper def not_compatible_for(*graph_types): """Decorator to mark builtin algorithms as not compatible with graph. Args: graph_types: list of string Entries must be one of 'arrow_property', 'dynamic_property', 'arrow_projected', 'dynamic_projected' Returns: The decorated function. Raises: RuntimeError: If graph is not compatible. KeyError: If parameter is not correctly. Notes: Multiple types or use multiple @not_compatible_for() lines are joined logically with "or". Examples: >>> @not_compatible_for('arrow_property', 'dynamic_property') >>> def sssp(G, src): >>> pass """ def _not_compatible_for(not_compatible_for_func): @functools.wraps(not_compatible_for_func) def wrapper(*args, **kwargs): graph = args[0] if not hasattr(graph, "graph_type"): raise InvalidArgumentError( "Missing graph_type attribute in graph object." ) terms = { "arrow_property": graph.graph_type == graph_def_pb2.ARROW_PROPERTY, "dynamic_property": graph.graph_type == graph_def_pb2.DYNAMIC_PROPERTY, "arrow_projected": graph.graph_type == graph_def_pb2.ARROW_PROJECTED, "dynamic_projected": graph.graph_type == graph_def_pb2.DYNAMIC_PROJECTED, "arrow_flattened": graph.graph_type == graph_def_pb2.ARROW_FLATTENED, } match = False try: for t in graph_types: match = match or terms[t] except KeyError: raise InvalidArgumentError( "Use one or more of arrow_property,dynamic_property,arrow_projected,dynamic_projected,arrow_flattened", ) if match: raise InvalidArgumentError( "Not compatible for %s type" % " ".join(graph_types) ) else: return not_compatible_for_func(*args, **kwargs) return wrapper return _not_compatible_for class AppAssets(DAGNode): """A class represents an app asset node in a DAG that holds the bytes of the gar resource. 
Assets includes an algorithm name, and gar (for user defined algorithm), a context type (one of 'tensor', 'vertex_data', 'vertex_property', 'labeled_vertex_data', 'dynamic_vertex_data', 'labeled_vertex_property'), and its type (one of `cpp_pie`, `cython_pie`, `cython_pregel`), The instance of this class can be passed to init :class:`graphscope.framework.app.AppDAGNode` """ _support_context_type = [ "tensor", "vertex_data", "vertex_property", "labeled_vertex_data", "dynamic_vertex_data", "labeled_vertex_property", ] def __init__(self, algo, context=None, gar=None): """Init assets of the algorithm. Args: algo (str): Represent specific algo inside resource. context (str): Type of context that hold the calculation results. It will get from gar if param is None. Defaults to None. gar (bytes or BytesIO, optional): The bytes that encodes the application's source code. Defaults to None. """ self._algo = algo self._context_type = context self._type = "cpp_pie" # default is builtin app with `built_in` type self._meta = {} # used for gar resource if gar and isinstance(gar, (BytesIO, bytes)): self._gar = gar if isinstance(gar, bytes) else gar.getvalue() self._extract_meta_info() else: # built_in apps has no gar resource. self._gar = None if self._context_type not in self._support_context_type: raise InvalidArgumentError( "Unsupport context type: {0}".format(self._context_type) ) self._op = create_app(self) def __repr__(self) -> str: return f"graphscope.framework.app.AppAssets <type: {self._type}, algo: {self._algo}, context: {self._context_type}>" def _extract_meta_info(self): """Extract app meta info from gar resource. Raises: InvalidArgumentError: - :code:`gs_conf.yaml` not exist in gar resource. - App not found in gar resource. """ fp = BytesIO(self._gar) archive = zipfile.ZipFile(fp, "r") config = yaml.safe_load(archive.read(DEFAULT_GS_CONFIG_FILE)) # default app will used if there is only one app in it if self._algo is None and len(config["app"]) == 1: self._algo = config["app"][0]["algo"] logger.info("Default app %s will be used.", self._algo) for meta in config["app"]: if self._algo == meta["algo"]: if "context_type" in meta: self._context_type = meta["context_type"] self._type = meta["type"] self._meta = meta return raise InvalidArgumentError("App not found in gar: {}".format(self._algo)) @property def algo(self): """Algorithm name, e.g. sssp, pagerank. Returns: str: Algorithm name of this asset. """ return self._algo @property def context_type(self): """Context type, e.g. vertex_property, labeled_vertex_data. Returns: str: Type of the app context. """ return self._context_type @property def type(self): """Algorithm type, one of `cpp_pie`, `cython_pie`, `java_pie` or `cython_pregel`. Returns: str: Algorithm type of this asset. """ return self._type @property def gar(self): """Gar resource. Returns: bytes: gar resource of this asset. """ return self._gar @classmethod def to_gar(cls, path): if os.path.exists(path): raise RuntimeError("Path exist: {}.".format(path)) with open(path, "wb") as f: f.write(cls._gar) @classmethod def bytes(cls): return cls._gar @property def signature(self): """Generate a signature of the app assets by its algo name (and gar resources). Used to uniquely identify a app assets. Returns: str: signature of this assets """ s = hashlib.sha256() s.update(self._algo.encode("utf-8")) if self._gar: s.update(self._gar) return s.hexdigest() def is_compatible(self, graph): """Determine if this algorithm can run on this type of graph. 
Args: graph (:class:`GraphDAGNode`): A graph instance. Raises: InvalidArgumentError: - App is not compatible with graph ScannerError: - Yaml file format is incorrect. """ # builtin app if self._gar is None: return # check yaml file graph_type = graph_type_to_cpp_class(graph.graph_type) if graph_type not in self._meta["compatible_graph"]: raise InvalidArgumentError( "App is uncompatible with graph {}".format(graph_type) ) return True def __call__(self, graph, *args, **kwargs): """Instantiate an App and do queries over it.""" app_ = graph.session._wrapper(AppDAGNode(graph, self)) return app_(*args, **kwargs) class AppDAGNode(DAGNode): """A class represents a app node in a DAG. In GraphScope, an app node binding a concrete graph node that query executed on. """ def __init__(self, graph, app_assets: AppAssets): """Create an application using given :code:`gar` file, or given application class name. Args: graph (:class:`GraphDAGNode`): A :class:`GraphDAGNode` instance. app_assets: A :class:`AppAssets` instance. """ self._graph = graph self._app_assets = app_assets self._session = graph.session self._app_assets.is_compatible(self._graph) self._op = bind_app(graph, self._app_assets) # add app_assets op to dag is not exist if not self._session.dag.exists(self._app_assets.op): self._session.dag.add_op(self._app_assets.op) # add op to dag self._session.dag.add_op(self._op) def __repr__(self): s = f"graphscope.App <type: {self._app_assets.type}, algorithm: {self._app_assets.algo} " s += f"bounded_graph: {str(self._graph)}>" return s @property def algo(self): """Algorithm name, e.g. sssp, pagerank. Returns: str: Algorithm name of this asset. """ return self._app_assets.algo @property def gar(self): """Gar resource. Returns: bytes: gar resource of this asset. """ return self._app_assets.gar def __call__(self, *args, **kwargs): """When called, check arguments based on app type, Then do build and query. Raises: InvalidArgumentError: If app_type is None, or positional argument found when app_type not `cpp_pie`. Returns: :class:`Context`: Query context, include running results of the app. """ app_type = self._app_assets.type check_argument(app_type is not None) context_type = self._app_assets.context_type if not isinstance(self._graph, DAGNode) and not self._graph.loaded(): raise RuntimeError("The graph is not loaded") if self._app_assets.type in ["cython_pie", "cython_pregel", "java_pie"]: # cython app support kwargs only check_argument( not args, "Only support using keyword arguments in cython app." ) return create_context_node( context_type, self, self._graph, json.dumps(kwargs) ) return create_context_node(context_type, self, self._graph, *args, **kwargs) def unload(self): """Unload this app from graphscope engine. Returns: :class:`graphscope.framework.app.UnloadedApp`: Evaluated in eager mode. """ op = unload_app(self) return UnloadedApp(self._session, op) class App(object): """An application that can run on graphs and produce results. Analytical engine will build the app dynamic library when instantiate a app instance. And the dynamic library will be reused if subsequent app's signature matches one of previous ones. 
""" def __init__(self, app_node, key): self._app_node = app_node self._session = self._app_node.session self._key = key # copy and set op evaluated self._app_node.op = deepcopy(self._app_node.op) self._app_node.evaluated = True self._session.dag.add_op(self._app_node.op) self._saved_signature = self.signature def __getattr__(self, name): if hasattr(self._app_node, name): return getattr(self._app_node, name) raise AttributeError("{0} not found.".format(name)) @property def key(self): """A unique identifier of App.""" return self._key @property def signature(self): """Signature is computed by all critical components of the App.""" return hashlib.sha256( "{}.{}".format(self._app_assets.signature, self._graph.template_str).encode( "utf-8" ) ).hexdigest() def unload(self): """Unload app. Both on engine side and python side. Set the key to None.""" rlt = self._session._wrapper(self._app_node.unload()) self._key = None self._session = None return rlt def __call__(self, *args, **kwargs): return self._session._wrapper(self._app_node(*args, **kwargs)) class UnloadedApp(DAGNode): """Unloaded app node in a DAG.""" def __init__(self, session, op): self._session = session self._op = op # add op to dag self._session.dag.add_op(self._op) def load_app(gar=None, algo=None, context=None, **kwargs): """Load an app from gar. bytes or the resource of the specified path or bytes. Args: algo: str Algo name inside resource. None will extract name from gar resource if there is only one app in it. gar: bytes or BytesIO or str str represent the path of resource. Returns: Instance of <graphscope.framework.app.AppAssets> Raises: FileNotFoundError: File not exist. PermissionError: Permission denied of path. TypeError: File is not a zip file. Examples: >>> sssp = load_app(gar='./resource.gar', algo='sssp') >>> sssp(src=4) which will have following `.gs_conf.yaml` in resource.gar: app: - algo: sssp type: cpp_pie class_name: grape:SSSP context_type: vertex_data src: sssp/sssp.h compatible_graph: - gs::ArrowProjectedFragment """ if isinstance(gar, (BytesIO, bytes)): return AppAssets(algo, context, gar, **kwargs) elif isinstance(gar, str): with open(gar, "rb") as f: content = f.read() if not zipfile.is_zipfile(gar): raise InvalidArgumentError("{} is not a zip file.".format(gar)) return AppAssets(algo, context, content, **kwargs) else: raise InvalidArgumentError("Wrong type with {}".format(gar))
[
 "logging.getLogger",
 "graphscope.framework.dag_utils.create_app",
 "os.path.exists",
 "hashlib.sha256",
 "zipfile.is_zipfile",
 "zipfile.ZipFile",
 "graphscope.framework.errors.InvalidArgumentError",
 "json.dumps",
 "io.BytesIO",
 "graphscope.framework.dag_utils.bind_app",
 "functools.wraps",
 "graphscope.framework.context.create_context_node",
 "graphscope.framework.utils.graph_type_to_cpp_class",
 "graphscope.framework.dag_utils.unload_app",
 "copy.deepcopy",
 "graphscope.framework.errors.check_argument"
]
[((1312, 1343), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1329, 1343), False, 'import logging\n'), ((1422, 1443), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (1437, 1443), False, 'import functools\n'), ((2569, 2609), 'functools.wraps', 'functools.wraps', (['not_compatible_for_func'], {}), '(not_compatible_for_func)\n', (2584, 2609), False, 'import functools\n'), ((5844, 5860), 'graphscope.framework.dag_utils.create_app', 'create_app', (['self'], {}), '(self)\n', (5854, 5860), False, 'from graphscope.framework.dag_utils import create_app\n'), ((6295, 6313), 'io.BytesIO', 'BytesIO', (['self._gar'], {}), '(self._gar)\n', (6302, 6313), False, 'from io import BytesIO\n'), ((6332, 6356), 'zipfile.ZipFile', 'zipfile.ZipFile', (['fp', '"""r"""'], {}), "(fp, 'r')\n", (6347, 6356), False, 'import zipfile\n'), ((7879, 7899), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (7893, 7899), False, 'import os\n'), ((8353, 8369), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (8367, 8369), False, 'import hashlib\n'), ((8989, 9030), 'graphscope.framework.utils.graph_type_to_cpp_class', 'graph_type_to_cpp_class', (['graph.graph_type'], {}), '(graph.graph_type)\n', (9012, 9030), False, 'from graphscope.framework.utils import graph_type_to_cpp_class\n'), ((10112, 10145), 'graphscope.framework.dag_utils.bind_app', 'bind_app', (['graph', 'self._app_assets'], {}), '(graph, self._app_assets)\n', (10120, 10145), False, 'from graphscope.framework.dag_utils import bind_app\n'), ((11373, 11409), 'graphscope.framework.errors.check_argument', 'check_argument', (['(app_type is not None)'], {}), '(app_type is not None)\n', (11387, 11409), False, 'from graphscope.framework.errors import check_argument\n'), ((11987, 12056), 'graphscope.framework.context.create_context_node', 'create_context_node', (['context_type', 'self', 'self._graph', '*args'], {}), '(context_type, self, self._graph, *args, **kwargs)\n', (12006, 12056), False, 'from graphscope.framework.context import create_context_node\n'), ((12258, 12274), 'graphscope.framework.dag_utils.unload_app', 'unload_app', (['self'], {}), '(self)\n', (12268, 12274), False, 'from graphscope.framework.dag_utils import unload_app\n'), ((12823, 12850), 'copy.deepcopy', 'deepcopy', (['self._app_node.op'], {}), '(self._app_node.op)\n', (12831, 12850), False, 'from copy import deepcopy\n'), ((1565, 1634), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Missing graph_type attribute in graph object."""'], {}), "('Missing graph_type attribute in graph object.')\n", (1585, 1634), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((11739, 11818), 'graphscope.framework.errors.check_argument', 'check_argument', (['(not args)', '"""Only support using keyword arguments in cython app."""'], {}), "(not args, 'Only support using keyword arguments in cython app.')\n", (11753, 11818), False, 'from graphscope.framework.errors import check_argument\n'), ((2747, 2816), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Missing graph_type attribute in graph object."""'], {}), "('Missing graph_type attribute in graph object.')\n", (2767, 2816), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((11938, 11956), 'json.dumps', 'json.dumps', (['kwargs'], {}), '(kwargs)\n', (11948, 11956), False, 'import json\n'), ((15395, 15418), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['gar'], {}), '(gar)\n', 
(15413, 15418), False, 'import zipfile\n'), ((3520, 3654), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Use one or more of arrow_property,dynamic_property,arrow_projected,dynamic_projected,arrow_flattened"""'], {}), "(\n 'Use one or more of arrow_property,dynamic_property,arrow_projected,dynamic_projected,arrow_flattened'\n )\n", (3540, 3654), False, 'from graphscope.framework.errors import InvalidArgumentError\n')]
#! /usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Coordinator between client and engines""" import argparse import atexit import hashlib import json import logging import os import queue import random import signal import string import sys import threading import time import urllib.parse import urllib.request from concurrent import futures from io import StringIO import grpc from gscoordinator.io_utils import StdoutWrapper # capture system stdout sys.stdout = StdoutWrapper(sys.stdout) from graphscope.proto import attr_value_pb2 from graphscope.proto import coordinator_service_pb2_grpc from graphscope.proto import engine_service_pb2_grpc from graphscope.proto import error_codes_pb2 from graphscope.proto import message_pb2 from graphscope.proto import op_def_pb2 from graphscope.proto import types_pb2 from gscoordinator.cluster import KubernetesClusterLauncher from gscoordinator.launcher import LocalLauncher from gscoordinator.object_manager import GraphMeta from gscoordinator.object_manager import LibMeta from gscoordinator.object_manager import ObjectManager from gscoordinator.utils import compile_app from gscoordinator.utils import compile_graph_frame from gscoordinator.utils import create_single_op_dag from gscoordinator.utils import distribute_lib_on_k8s from gscoordinator.utils import distribute_lib_via_hosts from gscoordinator.utils import dump_string from gscoordinator.utils import get_app_sha256 from gscoordinator.utils import get_graph_sha256 from gscoordinator.utils import get_lib_path from gscoordinator.utils import str2bool from gscoordinator.utils import to_maxgraph_schema from gscoordinator.version import __version__ COORDINATOR_HOME = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) GRAPHSCOPE_HOME = os.path.join(COORDINATOR_HOME, "..") WORKSPACE = "/tmp/gs" DEFAULT_GS_CONFIG_FILE = ".gs_conf.yaml" ANALYTICAL_ENGINE_HOME = os.path.join(GRAPHSCOPE_HOME, "analytical_engine") ANALYTICAL_ENGINE_PATH = os.path.join(ANALYTICAL_ENGINE_HOME, "build", "grape_engine") TEMPLATE_DIR = os.path.join(COORDINATOR_HOME, "gscoordinator", "template") BUILTIN_APP_RESOURCE_PATH = os.path.join( COORDINATOR_HOME, "gscoordinator", "builtin/app/builtin_app.gar" ) GS_DEBUG_ENDPOINT = os.environ.get("GS_DEBUG_ENDPOINT", "") ENGINE_CONTAINER = "engine" VINEYARD_CONTAINER = "vineyard" MAXGRAPH_MANAGER_HOST = "http://%s.%s.svc.cluster.local:8080" logger = logging.getLogger("graphscope") class CoordinatorServiceServicer( coordinator_service_pb2_grpc.CoordinatorServiceServicer ): """Provides methods that implement functionality of master service server. Holding: 1. process: the grape-engine process. 2. session_id: the handle for a particular session to engine 3. vineyard_ipc_socket: returned by grape-engine 4. vineyard_rpc_socket: returned by grape-engine 5. engine_endpoint: the endpoint of grape-engine 6. 
engine_servicer: grpc connection to grape-engine """ def __init__(self, launcher, dangling_timeout_seconds, log_level="INFO"): self._launcher = launcher self._request = None self._object_manager = ObjectManager() self._dangling_detecting_timer = None self._config_logging(log_level) # only one connection is allowed at the same time # generate session id when a client connection is established self._session_id = None # launch engines if len(GS_DEBUG_ENDPOINT) > 0: logger.info( "Coordinator will connect to engine with endpoint: " + GS_DEBUG_ENDPOINT ) self._launcher._analytical_engine_endpoint = GS_DEBUG_ENDPOINT else: if not self._launcher.start(): raise RuntimeError("Coordinator Launching failed.") self._launcher_type = self._launcher.type() if self._launcher_type == types_pb2.K8S: self._pods_list = self._launcher.get_pods_list() self._k8s_namespace = self._launcher.get_namespace() self._gie_graph_manager_service_name = ( self._launcher.get_gie_graph_manager_service_name() ) else: self._pods_list = [] # locally launched self._k8s_namespace = "" # analytical engine self._analytical_engine_stub = self._create_grpc_stub() self._analytical_engine_config = None self._analytical_engine_endpoint = None self._builtin_workspace = os.path.join(WORKSPACE, "builtin") # udf app workspace should be bound to a specific session when client connect. self._udf_app_workspace = None # control log fetching self._streaming_logs = True # dangling check self._dangling_timeout_seconds = dangling_timeout_seconds if self._dangling_timeout_seconds >= 0: self._dangling_detecting_timer = threading.Timer( interval=self._dangling_timeout_seconds, function=self._cleanup, args=( True, True, ), ) self._dangling_detecting_timer.start() atexit.register(self._cleanup) def __del__(self): self._cleanup() def _generate_session_id(self): return "session_" + "".join( [random.choice(string.ascii_lowercase) for _ in range(8)] ) def _config_logging(self, log_level): """Set log level basic on config. Args: log_level (str): Log level of stdout handler """ if log_level: log_level = log_level.upper() logger = logging.getLogger("graphscope") logger.setLevel(logging.DEBUG) stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setLevel(log_level) formatter = logging.Formatter( "%(asctime)s [%(levelname)s][%(module)s:%(lineno)d]: %(message)s" ) stdout_handler.setFormatter(formatter) logger.addHandler(stdout_handler) def ConnectSession(self, request, context): # A session is already connected. if self._request: return self._make_response( message_pb2.ConnectSessionResponse, code=error_codes_pb2.CONNECTION_ERROR, error_msg="Cannot setup more than one connection at the same time.", ) # Connect to serving coordinator. self._request = request self._analytical_engine_config = self._get_engine_config() # Generate session id self._session_id = self._generate_session_id() self._udf_app_workspace = os.path.join(WORKSPACE, self._session_id) # Session connected, fetch logs via gRPC. 
self._streaming_logs = True sys.stdout.drop(False) return self._make_response( message_pb2.ConnectSessionResponse, code=error_codes_pb2.OK, session_id=self._session_id, cluster_type=self._launcher.type(), num_workers=self._launcher.num_workers, engine_config=json.dumps(self._analytical_engine_config), pod_name_list=self._pods_list, namespace=self._k8s_namespace, ) def HeartBeat(self, request, context): if self._request and self._request.dangling_timeout_seconds >= 0: # Reset dangling detect timer if self._dangling_detecting_timer: self._dangling_detecting_timer.cancel() self._dangling_detecting_timer = threading.Timer( interval=self._request.dangling_timeout_seconds, function=self._cleanup, args=( self._request.cleanup_instance, True, ), ) self._dangling_detecting_timer.start() # analytical engine request = message_pb2.HeartBeatRequest() try: self._analytical_engine_stub.HeartBeat(request) except Exception as e: return self._make_response( message_pb2.HeartBeatResponse, error_codes_pb2.CONNECTION_ERROR, "connect analytical engine failed: {}".format(str(e)), ) else: return self._make_response( message_pb2.HeartBeatResponse, error_codes_pb2.OK ) def RunStep(self, request, context): # noqa: C901 # only one op in one step is allowed. if len(request.dag_def.op) != 1: return self._make_response( message_pb2.RunStepResponse, error_codes_pb2.INVALID_ARGUMENT_ERROR, "Request's op size is not equal to 1.", ) op = request.dag_def.op[0] # Compile app or not. if op.op == types_pb2.CREATE_APP: try: op, app_sig, app_lib_path = self._maybe_compile_app(op) except Exception as e: error_msg = "Failed to compile app: {}".format(str(e)) logger.error(error_msg) return self._make_response( message_pb2.RunStepResponse, error_codes_pb2.COMPILATION_ERROR, error_msg, op, ) # If engine crashed, we will get a SocketClosed grpc Exception. # In that case, we should notify client the engine is dead. 
# Compile graph or not # arrow property graph and project graph need to compile if ( ( op.op == types_pb2.CREATE_GRAPH and op.attr[types_pb2.GRAPH_TYPE].graph_type == types_pb2.ARROW_PROPERTY ) or op.op == types_pb2.TRANSFORM_GRAPH or op.op == types_pb2.PROJECT_TO_SIMPLE or op.op == types_pb2.ADD_EDGES or op.op == types_pb2.ADD_VERTICES ): try: op = self._maybe_register_graph(op, request.session_id) except grpc.RpcError as e: logger.error("self._launcher.poll() = %s", self._launcher.poll()) if self._launcher.poll() is not None: message = "Analytical engine exited with %s" % self._launcher.poll() else: message = str(e) return self._make_response( message_pb2.RunStepResponse, error_codes_pb2.FATAL_ERROR, message, op, ) except Exception as e: error_msg = "Graph compile error: {}".format(str(e)) logger.error(error_msg) return self._make_response( message_pb2.RunStepResponse, error_codes_pb2.COMPILATION_ERROR, error_msg, op, ) try: response = self._analytical_engine_stub.RunStep(request) except grpc.RpcError as e: logger.error("self._launcher.poll() = %s", self._launcher.poll()) if self._launcher.poll() is not None: message = "Analytical engine exited with %s" % self._launcher.poll() else: message = str(e) return self._make_response( message_pb2.RunStepResponse, error_codes_pb2.FATAL_ERROR, message, op ) except Exception as e: return self._make_response( message_pb2.RunStepResponse, error_codes_pb2.UNKNOWN, str(e), op ) if response.status.code == error_codes_pb2.OK: if op.op in ( types_pb2.CREATE_GRAPH, types_pb2.ADD_VERTICES, types_pb2.ADD_EDGES, types_pb2.ADD_COLUMN, ): schema_path = os.path.join("/tmp", response.graph_def.key + ".json") self._object_manager.put( response.graph_def.key, GraphMeta( response.graph_def.key, response.graph_def.vineyard_id, response.graph_def.schema_def, schema_path, ), ) if response.graph_def.graph_type == types_pb2.ARROW_PROPERTY: dump_string( to_maxgraph_schema( response.graph_def.schema_def.property_schema_json ), schema_path, ) response.graph_def.schema_path = schema_path elif op.op == types_pb2.CREATE_APP: self._object_manager.put( app_sig, LibMeta(response.result.decode("utf-8"), "app", app_lib_path), ) elif op.op == types_pb2.UNLOAD_GRAPH: self._object_manager.pop(op.attr[types_pb2.GRAPH_NAME].s.decode()) elif op.op == types_pb2.UNLOAD_APP: self._object_manager.pop(op.attr[types_pb2.APP_NAME].s.decode()) return response def _maybe_compile_app(self, op): app_sig = get_app_sha256(op.attr) space = self._builtin_workspace if types_pb2.GAR in op.attr: space = self._udf_app_workspace app_lib_path = get_lib_path(os.path.join(space, app_sig), app_sig) if not os.path.isfile(app_lib_path): compiled_path = self._compile_lib_and_distribute(compile_app, app_sig, op) if app_lib_path != compiled_path: raise RuntimeError("Computed path not equal to compiled path.") op.attr[types_pb2.APP_LIBRARY_PATH].CopyFrom( attr_value_pb2.AttrValue(s=app_lib_path.encode("utf-8")) ) return op, app_sig, app_lib_path def _maybe_register_graph(self, op, session_id): graph_sig = get_graph_sha256(op.attr) space = self._builtin_workspace graph_lib_path = get_lib_path(os.path.join(space, graph_sig), graph_sig) if not os.path.isfile(graph_lib_path): compiled_path = self._compile_lib_and_distribute( compile_graph_frame, graph_sig, op ) if graph_lib_path != compiled_path: raise RuntimeError("Computed path not equal to compiled path.") if graph_sig not in self._object_manager: # register graph op_def = op_def_pb2.OpDef(op=types_pb2.REGISTER_GRAPH_TYPE) 
op_def.attr[types_pb2.GRAPH_LIBRARY_PATH].CopyFrom( attr_value_pb2.AttrValue(s=graph_lib_path.encode("utf-8")) ) op_def.attr[types_pb2.TYPE_SIGNATURE].CopyFrom( attr_value_pb2.AttrValue(s=graph_sig.encode("utf-8")) ) op_def.attr[types_pb2.GRAPH_TYPE].CopyFrom( attr_value_pb2.AttrValue( graph_type=op.attr[types_pb2.GRAPH_TYPE].graph_type ) ) dag_def = op_def_pb2.DagDef() dag_def.op.extend([op_def]) register_request = message_pb2.RunStepRequest( session_id=session_id, dag_def=dag_def ) register_response = self._analytical_engine_stub.RunStep(register_request) if register_response.status.code == error_codes_pb2.OK: self._object_manager.put( graph_sig, LibMeta(register_response.result, "graph_frame", graph_lib_path), ) else: raise RuntimeError("Error occur when register graph") op.attr[types_pb2.TYPE_SIGNATURE].CopyFrom( attr_value_pb2.AttrValue(s=graph_sig.encode("utf-8")) ) return op def FetchLogs(self, request, context): while self._streaming_logs: try: message = sys.stdout.poll(timeout=3) except queue.Empty: pass else: if self._streaming_logs: yield self._make_response( message_pb2.FetchLogsResponse, error_codes_pb2.OK, message=message, ) def CloseSession(self, request, context): """ Disconnect session, note that it doesn't clean up any resources. """ if request.session_id != self._session_id: return self._make_response( message_pb2.CloseSessionResponse, error_codes_pb2.INVALID_ARGUMENT_ERROR, "Session handle does not match", ) self._cleanup( cleanup_instance=self._request.cleanup_instance, is_dangling=False ) self._request = None # Session closed, stop streaming logs sys.stdout.drop(True) self._streaming_logs = False return self._make_response(message_pb2.CloseSessionResponse, error_codes_pb2.OK) def CreateInteractiveInstance(self, request, context): object_id = request.object_id gremlin_server_cpu = request.gremlin_server_cpu gremlin_server_mem = request.gremlin_server_mem with open(request.schema_path) as file: schema_json = file.read() params = { "graphName": "%s" % object_id, } if self._launcher_type == types_pb2.K8S: manager_host = MAXGRAPH_MANAGER_HOST % ( self._gie_graph_manager_service_name, self._k8s_namespace, ) params.update( { "schemaJson": schema_json, "podNameList": ",".join(self._pods_list), "containerName": ENGINE_CONTAINER, "preemptive": str(self._launcher.preemptive), "gremlinServerCpu": str(gremlin_server_cpu), "gremlinServerMem": gremlin_server_mem, } ) post_url = "%s/instance/create" % manager_host engine_params = [ "{}:{}".format(key, value) for key, value in request.engine_params.items() ] params["engineParams"] = "'{}'".format(";".join(engine_params)) else: manager_host = self._launcher.graph_manager_endpoint params.update( { "vineyardIpcSocket": self._launcher.vineyard_socket, "schemaPath": request.schema_path, } ) post_url = "http://%s/instance/create_local" % manager_host post_data = urllib.parse.urlencode(params).encode("utf-8") create_res = urllib.request.urlopen(url=post_url, data=post_data) res_json = json.load(create_res) error_code = res_json["errorCode"] if error_code == 0: front_host = res_json["frontHost"] front_port = res_json["frontPort"] logger.info( "build frontend %s:%d for graph %ld", front_host, front_port, object_id, ) return message_pb2.CreateInteractiveResponse( status=message_pb2.ResponseStatus(code=error_codes_pb2.OK), frontend_host=front_host, frontend_port=front_port, object_id=object_id, ) else: error_message = ( "create interactive instance for object id %ld failed with error code %d message %s" % (object_id, error_code, 
res_json["errorMessage"]) ) logger.error(error_message) return message_pb2.CreateInteractiveResponse( status=message_pb2.ResponseStatus( code=error_codes_pb2.INTERACTIVE_ENGINE_INTERNAL_ERROR, error_msg=error_message, ), frontend_host="", frontend_port=0, object_id=object_id, ) def CloseInteractiveInstance(self, request, context): object_id = request.object_id if self._launcher_type == types_pb2.K8S: manager_host = MAXGRAPH_MANAGER_HOST % ( self._gie_graph_manager_service_name, self._k8s_namespace, ) pod_name_list = ",".join(self._pods_list) close_url = "%s/instance/close?graphName=%ld&podNameList=%s&containerName=%s&waitingForDelete=%s" % ( manager_host, object_id, pod_name_list, ENGINE_CONTAINER, str(self._launcher.waiting_for_delete()), ) else: manager_host = self._launcher.graph_manager_endpoint close_url = "http://%s/instance/close_local?graphName=%ld" % ( manager_host, object_id, ) logger.info("Coordinator close interactive instance with url[%s]" % close_url) try: close_res = urllib.request.urlopen(close_url).read() except Exception as e: logger.error("Failed to close interactive instance: %s", e) return message_pb2.CloseInteractiveResponse( status=message_pb2.ResponseStatus( code=error_codes_pb2.INTERACTIVE_ENGINE_INTERNAL_ERROR, error_msg="Internal error during close interactive instance: %d, %s" % (400, e), ) ) res_json = json.loads(close_res.decode("utf-8", errors="ignore")) error_code = res_json["errorCode"] if 0 == error_code: return message_pb2.CloseInteractiveResponse( status=message_pb2.ResponseStatus(code=error_codes_pb2.OK) ) else: error_message = ( "Failed to close interactive instance for object id %ld with error code %d message %s" % (object_id, error_code, res_json["errorMessage"]) ) logger.error("Failed to close interactive instance: %s", error_message) return message_pb2.CloseInteractiveResponse( status=message_pb2.ResponseStatus( code=error_codes_pb2.INTERACTIVE_ENGINE_INTERNAL_ERROR, error_msg=error_message, ) ) def CreateLearningInstance(self, request, context): logger.info( "Coordinator create learning instance with object id %ld", request.object_id, ) object_id = request.object_id handle = request.handle config = request.config endpoints = self._launcher.create_learning_instance(object_id, handle, config) return message_pb2.CreateLearningInstanceResponse( status=message_pb2.ResponseStatus(code=error_codes_pb2.OK), endpoints=",".join(endpoints), ) def CloseLearningInstance(self, request, context): logger.info( "Coordinator close learning instance with object id %ld", request.object_id, ) self._launcher.close_learning_instance(request.object_id) return message_pb2.CloseLearningInstanceResponse( status=message_pb2.ResponseStatus(code=error_codes_pb2.OK) ) @staticmethod def _make_response(resp_cls, code, error_msg="", op=None, **args): resp = resp_cls( status=message_pb2.ResponseStatus(code=code, error_msg=error_msg), **args ) if op: resp.status.op.CopyFrom(op) return resp def _cleanup(self, cleanup_instance=True, is_dangling=False): # clean up session resources. 
for key in self._object_manager.keys(): obj = self._object_manager.get(key) obj_type = obj.type unload_type = None if obj_type == "app": unload_type = types_pb2.UNLOAD_APP config = { types_pb2.APP_NAME: attr_value_pb2.AttrValue( s=obj.key.encode("utf-8") ) } elif obj_type == "graph": unload_type = types_pb2.UNLOAD_GRAPH config = { types_pb2.GRAPH_NAME: attr_value_pb2.AttrValue( s=obj.key.encode("utf-8") ) } # dynamic graph doesn't have a vineyard id if obj.vineyard_id != -1: config[types_pb2.VINEYARD_ID] = attr_value_pb2.AttrValue( i=obj.vineyard_id ) if unload_type: dag_def = create_single_op_dag(unload_type, config) request = message_pb2.RunStepRequest( session_id=self._session_id, dag_def=dag_def ) self._analytical_engine_stub.RunStep(request) self._object_manager.clear() self._request = None # cancel dangling detect timer if self._dangling_detecting_timer: self._dangling_detecting_timer.cancel() # close engines if cleanup_instance: self._analytical_engine_stub = None self._analytical_engine_endpoint = None self._launcher.stop(is_dangling=is_dangling) self._session_id = None def _create_grpc_stub(self): options = [ ("grpc.max_send_message_length", 2147483647), ("grpc.max_receive_message_length", 2147483647), ] channel = grpc.insecure_channel( self._launcher.analytical_engine_endpoint, options=options ) return engine_service_pb2_grpc.EngineServiceStub(channel) def _get_engine_config(self): op_def = op_def_pb2.OpDef(op=types_pb2.GET_ENGINE_CONFIG) dag_def = op_def_pb2.DagDef() dag_def.op.extend([op_def]) fetch_request = message_pb2.RunStepRequest( session_id=self._session_id, dag_def=dag_def ) fetch_response = self._analytical_engine_stub.RunStep(fetch_request) config = json.loads(fetch_response.result.decode("utf-8")) if self._launcher_type == types_pb2.K8S: config["vineyard_service_name"] = self._launcher.get_vineyard_service_name() config["vineyard_rpc_endpoint"] = self._launcher.get_vineyard_rpc_endpoint() config["mars_endpoint"] = self._launcher.get_mars_scheduler_endpoint() else: config["engine_hosts"] = self._launcher.hosts config["mars_endpoint"] = None return config def _compile_lib_and_distribute(self, compile_func, lib_name, op): if self._analytical_engine_config is None: # fetch experimental_on compile option from engine self._analytical_engine_config = self._get_engine_config() space = self._builtin_workspace if types_pb2.GAR in op.attr: space = self._udf_app_workspace app_lib_path = compile_func( space, lib_name, op.attr, self._analytical_engine_config ) if self._launcher_type == types_pb2.K8S: distribute_lib_on_k8s(",".join(self._pods_list), app_lib_path) else: distribute_lib_via_hosts(self._launcher.hosts, app_lib_path) return app_lib_path def parse_sys_args(): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument( "--num_workers", type=int, default=4, help="The number of engine workers.", ) parser.add_argument( "--preemptive", type=str2bool, nargs="?", const=True, default=True, help="Support resource preemption or resource guarantee", ) parser.add_argument( "--instance_id", type=str, help="Unique id for each GraphScope instance.", ) parser.add_argument( "--port", type=int, default=63800, help="Coordinator service port.", ) parser.add_argument( "--log_level", type=str, default="info", help="Log level, info or debug.", ) parser.add_argument( "--hosts", type=str, default="localhost", help="A list of hostname, comma separated.", ) parser.add_argument( "--vineyard_socket", type=str, default=None, help="Socket path to 
connect to vineyard, random socket will be created if param missing.", ) parser.add_argument( "--cluster_type", type=str, default="k8s", help="Deploy graphscope components on local or kubernetes cluster.", ) parser.add_argument( "--k8s_namespace", type=str, default="graphscope", help="Contains the namespace to create all resource inside, namespace must be exist.", ) parser.add_argument( "--k8s_service_type", type=str, default="NodePort", help="Valid options are NodePort, and LoadBalancer.", ) parser.add_argument( "--k8s_gs_image", type=str, default="registry.cn-hongkong.aliyuncs.com/graphscope/graphscope:{}".format( __version__ ), help="Docker image of graphscope engines.", ) parser.add_argument( "--k8s_coordinator_name", type=str, default="", help="Coordinator name in graphscope instance.", ) parser.add_argument( "--k8s_coordinator_service_name", type=str, default="", help="Coordinator service name in graphscope instance.", ) parser.add_argument( "--k8s_etcd_image", type=str, default="registry.cn-hongkong.aliyuncs.com/graphscope/etcd:v3.4.13", help="Docker image of etcd, used by vineyard.", ) parser.add_argument( "--k8s_gie_graph_manager_image", type=str, default="registry.cn-hongkong.aliyuncs.com/graphscope/maxgraph_standalone_manager:{}".format( __version__ ), help="Graph Manager image of graph interactive engine.", ) parser.add_argument( "--k8s_zookeeper_image", type=str, default="registry.cn-hongkong.aliyuncs.com/graphscope/zookeeper:3.4.10", help="Docker image of zookeeper, used by graph interactive engine.", ) parser.add_argument( "--k8s_image_pull_policy", type=str, default="IfNotPresent", help="Kubernetes image pull policy.", ) parser.add_argument( "--k8s_image_pull_secrets", type=str, default="graphscope", help="A list of secret name, comma separated.", ) parser.add_argument( "--k8s_vineyard_daemonset", type=str, default="", help="Try to use the existing vineyard DaemonSet with name 'k8s_vineyard_daemonset'.", ) parser.add_argument( "--k8s_vineyard_cpu", type=float, default=1.0, help="Cpu cores of vinayard container.", ) parser.add_argument( "--k8s_vineyard_mem", type=str, default="256Mi", help="Memory of vineyard container, suffix with ['Mi', 'Gi', 'Ti'].", ) parser.add_argument( "--k8s_vineyard_shared_mem", type=str, default="8Gi", help="Plasma memory in vineyard, suffix with ['Mi', 'Gi', 'Ti'].", ) parser.add_argument( "--k8s_engine_cpu", type=float, default=1.0, help="Cpu cores of engine container, default: 1.0", ) parser.add_argument( "--k8s_engine_mem", type=str, default="256Mi", help="Memory of engine container, suffix with ['Mi', 'Gi', 'Ti'].", ) parser.add_argument( "--k8s_etcd_num_pods", type=int, default=3, help="The number of etcd pods.", ) parser.add_argument( "--k8s_etcd_cpu", type=float, default=1.0, help="Cpu cores of etcd pod, default: 1.0", ) parser.add_argument( "--k8s_etcd_mem", type=str, default="256Mi", help="Memory of etcd pod, suffix with ['Mi', 'Gi', 'Ti'].", ) parser.add_argument( "--k8s_zookeeper_cpu", type=float, default=1.0, help="Cpu cores of zookeeper container, default: 1.0", ) parser.add_argument( "--k8s_zookeeper_mem", type=str, default="256Mi", help="Memory of zookeeper container, suffix with ['Mi', 'Gi', 'Ti'].", ) parser.add_argument( "--k8s_gie_graph_manager_cpu", type=float, default=1.0, help="Cpu cores of graph manager container, default: 1.0", ) parser.add_argument( "--k8s_gie_graph_manager_mem", type=str, default="256Mi", help="Memory of graph manager container, suffix with ['Mi', 'Gi', 'Ti'].", ) parser.add_argument( "--k8s_with_mars", 
type=str2bool, nargs="?", const=True, default=False, help="Enable mars or not.", ) parser.add_argument( "--k8s_mars_worker_cpu", type=float, default=0.5, help="Cpu cores of mars worker container, default: 0.5", ) parser.add_argument( "--k8s_mars_worker_mem", type=str, default="4Gi", help="Memory of mars worker container, default: 4Gi", ) parser.add_argument( "--k8s_mars_scheduler_cpu", type=float, default=0.5, help="Cpu cores of mars scheduler container, default: 0.5", ) parser.add_argument( "--k8s_mars_scheduler_mem", type=str, default="2Gi", help="Memory of mars scheduler container, default: 2Gi", ) parser.add_argument( "--k8s_volumes", type=str, default="{}", help="A json string for kubernetes volumes.", ) parser.add_argument( "--timeout_seconds", type=int, default=600, help="Launch failed after waiting timeout seconds.", ) parser.add_argument( "--dangling_timeout_seconds", type=int, default=600, help="Kill graphscope instance after seconds of client disconnect.", ) parser.add_argument( "--waiting_for_delete", type=str2bool, nargs="?", const=True, default=False, help="Waiting for delete graphscope instance.", ) parser.add_argument( "--k8s_delete_namespace", type=str2bool, nargs="?", const=True, default=False, help="Delete namespace or not.", ) return parser.parse_args() def launch_graphscope(): args = parse_sys_args() logger.info("Launching with args %s", args) if args.cluster_type == "k8s": launcher = KubernetesClusterLauncher( namespace=args.k8s_namespace, service_type=args.k8s_service_type, gs_image=args.k8s_gs_image, etcd_image=args.k8s_etcd_image, zookeeper_image=args.k8s_zookeeper_image, gie_graph_manager_image=args.k8s_gie_graph_manager_image, coordinator_name=args.k8s_coordinator_name, coordinator_service_name=args.k8s_coordinator_service_name, etcd_num_pods=args.k8s_etcd_num_pods, etcd_cpu=args.k8s_etcd_cpu, etcd_mem=args.k8s_etcd_mem, zookeeper_cpu=args.k8s_zookeeper_cpu, zookeeper_mem=args.k8s_zookeeper_mem, gie_graph_manager_cpu=args.k8s_gie_graph_manager_cpu, gie_graph_manager_mem=args.k8s_gie_graph_manager_mem, engine_cpu=args.k8s_engine_cpu, engine_mem=args.k8s_engine_mem, vineyard_daemonset=args.k8s_vineyard_daemonset, vineyard_cpu=args.k8s_vineyard_cpu, vineyard_mem=args.k8s_vineyard_mem, vineyard_shared_mem=args.k8s_vineyard_shared_mem, mars_worker_cpu=args.k8s_mars_worker_cpu, mars_worker_mem=args.k8s_mars_worker_mem, mars_scheduler_cpu=args.k8s_mars_scheduler_cpu, mars_scheduler_mem=args.k8s_mars_scheduler_mem, with_mars=args.k8s_with_mars, image_pull_policy=args.k8s_image_pull_policy, image_pull_secrets=args.k8s_image_pull_secrets, volumes=args.k8s_volumes, num_workers=args.num_workers, preemptive=args.preemptive, instance_id=args.instance_id, log_level=args.log_level, timeout_seconds=args.timeout_seconds, waiting_for_delete=args.waiting_for_delete, delete_namespace=args.k8s_delete_namespace, ) elif args.cluster_type == "hosts": launcher = LocalLauncher( num_workers=args.num_workers, hosts=args.hosts, vineyard_socket=args.vineyard_socket, shared_mem=args.k8s_vineyard_shared_mem, log_level=args.log_level, timeout_seconds=args.timeout_seconds, ) else: raise RuntimeError("Expect hosts or k8s of cluster_type parameter") coordinator_service_servicer = CoordinatorServiceServicer( launcher=launcher, dangling_timeout_seconds=args.dangling_timeout_seconds, log_level=args.log_level, ) # register gRPC server server = grpc.server(futures.ThreadPoolExecutor(os.cpu_count() or 1)) coordinator_service_pb2_grpc.add_CoordinatorServiceServicer_to_server( coordinator_service_servicer, 
server ) server.add_insecure_port("0.0.0.0:{}".format(args.port)) logger.info("Coordinator server listen at 0.0.0.0:%d", args.port) server.start() # handle SIGTERM signal def terminate(signum, frame): global coordinator_service_servicer coordinator_service_servicer._cleanup() signal.signal(signal.SIGTERM, terminate) try: # Grpc has handled SIGINT server.wait_for_termination() except KeyboardInterrupt: coordinator_service_servicer._cleanup() if __name__ == "__main__": launch_graphscope()
[ "logging.getLogger", "gscoordinator.launcher.LocalLauncher", "logging.StreamHandler", "gscoordinator.utils.create_single_op_dag", "gscoordinator.utils.get_graph_sha256", "graphscope.proto.engine_service_pb2_grpc.EngineServiceStub", "gscoordinator.cluster.KubernetesClusterLauncher", "os.cpu_count", "sys.stdout.poll", "graphscope.proto.op_def_pb2.DagDef", "argparse.ArgumentParser", "gscoordinator.object_manager.GraphMeta", "json.dumps", "gscoordinator.io_utils.StdoutWrapper", "atexit.register", "random.choice", "gscoordinator.object_manager.LibMeta", "threading.Timer", "graphscope.proto.message_pb2.ResponseStatus", "os.path.isfile", "gscoordinator.utils.get_app_sha256", "gscoordinator.object_manager.ObjectManager", "os.path.abspath", "signal.signal", "graphscope.proto.coordinator_service_pb2_grpc.add_CoordinatorServiceServicer_to_server", "gscoordinator.utils.to_maxgraph_schema", "graphscope.proto.message_pb2.HeartBeatRequest", "sys.stdout.drop", "graphscope.proto.message_pb2.RunStepRequest", "logging.Formatter", "graphscope.proto.attr_value_pb2.AttrValue", "graphscope.proto.op_def_pb2.OpDef", "os.path.join", "os.environ.get", "grpc.insecure_channel", "gscoordinator.utils.distribute_lib_via_hosts", "json.load" ]
[((1068, 1093), 'gscoordinator.io_utils.StdoutWrapper', 'StdoutWrapper', (['sys.stdout'], {}), '(sys.stdout)\n', (1081, 1093), False, 'from gscoordinator.io_utils import StdoutWrapper\n'), ((2361, 2397), 'os.path.join', 'os.path.join', (['COORDINATOR_HOME', '""".."""'], {}), "(COORDINATOR_HOME, '..')\n", (2373, 2397), False, 'import os\n'), ((2487, 2537), 'os.path.join', 'os.path.join', (['GRAPHSCOPE_HOME', '"""analytical_engine"""'], {}), "(GRAPHSCOPE_HOME, 'analytical_engine')\n", (2499, 2537), False, 'import os\n'), ((2563, 2624), 'os.path.join', 'os.path.join', (['ANALYTICAL_ENGINE_HOME', '"""build"""', '"""grape_engine"""'], {}), "(ANALYTICAL_ENGINE_HOME, 'build', 'grape_engine')\n", (2575, 2624), False, 'import os\n'), ((2640, 2699), 'os.path.join', 'os.path.join', (['COORDINATOR_HOME', '"""gscoordinator"""', '"""template"""'], {}), "(COORDINATOR_HOME, 'gscoordinator', 'template')\n", (2652, 2699), False, 'import os\n'), ((2728, 2806), 'os.path.join', 'os.path.join', (['COORDINATOR_HOME', '"""gscoordinator"""', '"""builtin/app/builtin_app.gar"""'], {}), "(COORDINATOR_HOME, 'gscoordinator', 'builtin/app/builtin_app.gar')\n", (2740, 2806), False, 'import os\n'), ((2833, 2872), 'os.environ.get', 'os.environ.get', (['"""GS_DEBUG_ENDPOINT"""', '""""""'], {}), "('GS_DEBUG_ENDPOINT', '')\n", (2847, 2872), False, 'import os\n'), ((3006, 3037), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (3023, 3037), False, 'import logging\n'), ((28227, 28306), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (28250, 28306), False, 'import argparse\n'), ((38414, 38526), 'graphscope.proto.coordinator_service_pb2_grpc.add_CoordinatorServiceServicer_to_server', 'coordinator_service_pb2_grpc.add_CoordinatorServiceServicer_to_server', (['coordinator_service_servicer', 'server'], {}), '(\n coordinator_service_servicer, server)\n', (38483, 38526), False, 'from graphscope.proto import coordinator_service_pb2_grpc\n'), ((38847, 38887), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'terminate'], {}), '(signal.SIGTERM, terminate)\n', (38860, 38887), False, 'import signal\n'), ((2315, 2340), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2330, 2340), False, 'import os\n'), ((3758, 3773), 'gscoordinator.object_manager.ObjectManager', 'ObjectManager', ([], {}), '()\n', (3771, 3773), False, 'from gscoordinator.object_manager import ObjectManager\n'), ((5104, 5138), 'os.path.join', 'os.path.join', (['WORKSPACE', '"""builtin"""'], {}), "(WORKSPACE, 'builtin')\n", (5116, 5138), False, 'import os\n'), ((5800, 5830), 'atexit.register', 'atexit.register', (['self._cleanup'], {}), '(self._cleanup)\n', (5815, 5830), False, 'import atexit\n'), ((6282, 6313), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (6299, 6313), False, 'import logging\n'), ((6379, 6412), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (6400, 6412), False, 'import logging\n'), ((6477, 6566), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(levelname)s][%(module)s:%(lineno)d]: %(message)s"""'], {}), "(\n '%(asctime)s [%(levelname)s][%(module)s:%(lineno)d]: %(message)s')\n", (6494, 6566), False, 'import logging\n'), ((7299, 7340), 'os.path.join', 'os.path.join', (['WORKSPACE', 'self._session_id'], {}), '(WORKSPACE, self._session_id)\n', (7311, 7340), 
False, 'import os\n'), ((7436, 7458), 'sys.stdout.drop', 'sys.stdout.drop', (['(False)'], {}), '(False)\n', (7451, 7458), False, 'import sys\n'), ((8551, 8581), 'graphscope.proto.message_pb2.HeartBeatRequest', 'message_pb2.HeartBeatRequest', ([], {}), '()\n', (8579, 8581), False, 'from graphscope.proto import message_pb2\n'), ((13899, 13922), 'gscoordinator.utils.get_app_sha256', 'get_app_sha256', (['op.attr'], {}), '(op.attr)\n', (13913, 13922), False, 'from gscoordinator.utils import get_app_sha256\n'), ((14626, 14651), 'gscoordinator.utils.get_graph_sha256', 'get_graph_sha256', (['op.attr'], {}), '(op.attr)\n', (14642, 14651), False, 'from gscoordinator.utils import get_graph_sha256\n'), ((17574, 17595), 'sys.stdout.drop', 'sys.stdout.drop', (['(True)'], {}), '(True)\n', (17589, 17595), False, 'import sys\n'), ((19536, 19557), 'json.load', 'json.load', (['create_res'], {}), '(create_res)\n', (19545, 19557), False, 'import json\n'), ((26402, 26488), 'grpc.insecure_channel', 'grpc.insecure_channel', (['self._launcher.analytical_engine_endpoint'], {'options': 'options'}), '(self._launcher.analytical_engine_endpoint, options=\n options)\n', (26423, 26488), False, 'import grpc\n'), ((26521, 26571), 'graphscope.proto.engine_service_pb2_grpc.EngineServiceStub', 'engine_service_pb2_grpc.EngineServiceStub', (['channel'], {}), '(channel)\n', (26562, 26571), False, 'from graphscope.proto import engine_service_pb2_grpc\n'), ((26624, 26672), 'graphscope.proto.op_def_pb2.OpDef', 'op_def_pb2.OpDef', ([], {'op': 'types_pb2.GET_ENGINE_CONFIG'}), '(op=types_pb2.GET_ENGINE_CONFIG)\n', (26640, 26672), False, 'from graphscope.proto import op_def_pb2\n'), ((26691, 26710), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (26708, 26710), False, 'from graphscope.proto import op_def_pb2\n'), ((26771, 26843), 'graphscope.proto.message_pb2.RunStepRequest', 'message_pb2.RunStepRequest', ([], {'session_id': 'self._session_id', 'dag_def': 'dag_def'}), '(session_id=self._session_id, dag_def=dag_def)\n', (26797, 26843), False, 'from graphscope.proto import message_pb2\n'), ((35800, 37332), 'gscoordinator.cluster.KubernetesClusterLauncher', 'KubernetesClusterLauncher', ([], {'namespace': 'args.k8s_namespace', 'service_type': 'args.k8s_service_type', 'gs_image': 'args.k8s_gs_image', 'etcd_image': 'args.k8s_etcd_image', 'zookeeper_image': 'args.k8s_zookeeper_image', 'gie_graph_manager_image': 'args.k8s_gie_graph_manager_image', 'coordinator_name': 'args.k8s_coordinator_name', 'coordinator_service_name': 'args.k8s_coordinator_service_name', 'etcd_num_pods': 'args.k8s_etcd_num_pods', 'etcd_cpu': 'args.k8s_etcd_cpu', 'etcd_mem': 'args.k8s_etcd_mem', 'zookeeper_cpu': 'args.k8s_zookeeper_cpu', 'zookeeper_mem': 'args.k8s_zookeeper_mem', 'gie_graph_manager_cpu': 'args.k8s_gie_graph_manager_cpu', 'gie_graph_manager_mem': 'args.k8s_gie_graph_manager_mem', 'engine_cpu': 'args.k8s_engine_cpu', 'engine_mem': 'args.k8s_engine_mem', 'vineyard_daemonset': 'args.k8s_vineyard_daemonset', 'vineyard_cpu': 'args.k8s_vineyard_cpu', 'vineyard_mem': 'args.k8s_vineyard_mem', 'vineyard_shared_mem': 'args.k8s_vineyard_shared_mem', 'mars_worker_cpu': 'args.k8s_mars_worker_cpu', 'mars_worker_mem': 'args.k8s_mars_worker_mem', 'mars_scheduler_cpu': 'args.k8s_mars_scheduler_cpu', 'mars_scheduler_mem': 'args.k8s_mars_scheduler_mem', 'with_mars': 'args.k8s_with_mars', 'image_pull_policy': 'args.k8s_image_pull_policy', 'image_pull_secrets': 'args.k8s_image_pull_secrets', 'volumes': 'args.k8s_volumes', 'num_workers': 
'args.num_workers', 'preemptive': 'args.preemptive', 'instance_id': 'args.instance_id', 'log_level': 'args.log_level', 'timeout_seconds': 'args.timeout_seconds', 'waiting_for_delete': 'args.waiting_for_delete', 'delete_namespace': 'args.k8s_delete_namespace'}), '(namespace=args.k8s_namespace, service_type=args.\n k8s_service_type, gs_image=args.k8s_gs_image, etcd_image=args.\n k8s_etcd_image, zookeeper_image=args.k8s_zookeeper_image,\n gie_graph_manager_image=args.k8s_gie_graph_manager_image,\n coordinator_name=args.k8s_coordinator_name, coordinator_service_name=\n args.k8s_coordinator_service_name, etcd_num_pods=args.k8s_etcd_num_pods,\n etcd_cpu=args.k8s_etcd_cpu, etcd_mem=args.k8s_etcd_mem, zookeeper_cpu=\n args.k8s_zookeeper_cpu, zookeeper_mem=args.k8s_zookeeper_mem,\n gie_graph_manager_cpu=args.k8s_gie_graph_manager_cpu,\n gie_graph_manager_mem=args.k8s_gie_graph_manager_mem, engine_cpu=args.\n k8s_engine_cpu, engine_mem=args.k8s_engine_mem, vineyard_daemonset=args\n .k8s_vineyard_daemonset, vineyard_cpu=args.k8s_vineyard_cpu,\n vineyard_mem=args.k8s_vineyard_mem, vineyard_shared_mem=args.\n k8s_vineyard_shared_mem, mars_worker_cpu=args.k8s_mars_worker_cpu,\n mars_worker_mem=args.k8s_mars_worker_mem, mars_scheduler_cpu=args.\n k8s_mars_scheduler_cpu, mars_scheduler_mem=args.k8s_mars_scheduler_mem,\n with_mars=args.k8s_with_mars, image_pull_policy=args.\n k8s_image_pull_policy, image_pull_secrets=args.k8s_image_pull_secrets,\n volumes=args.k8s_volumes, num_workers=args.num_workers, preemptive=args\n .preemptive, instance_id=args.instance_id, log_level=args.log_level,\n timeout_seconds=args.timeout_seconds, waiting_for_delete=args.\n waiting_for_delete, delete_namespace=args.k8s_delete_namespace)\n', (35825, 37332), False, 'from gscoordinator.cluster import KubernetesClusterLauncher\n'), ((5518, 5622), 'threading.Timer', 'threading.Timer', ([], {'interval': 'self._dangling_timeout_seconds', 'function': 'self._cleanup', 'args': '(True, True)'}), '(interval=self._dangling_timeout_seconds, function=self.\n _cleanup, args=(True, True))\n', (5533, 5622), False, 'import threading\n'), ((8197, 8335), 'threading.Timer', 'threading.Timer', ([], {'interval': 'self._request.dangling_timeout_seconds', 'function': 'self._cleanup', 'args': '(self._request.cleanup_instance, True)'}), '(interval=self._request.dangling_timeout_seconds, function=\n self._cleanup, args=(self._request.cleanup_instance, True))\n', (8212, 8335), False, 'import threading\n'), ((14080, 14108), 'os.path.join', 'os.path.join', (['space', 'app_sig'], {}), '(space, app_sig)\n', (14092, 14108), False, 'import os\n'), ((14134, 14162), 'os.path.isfile', 'os.path.isfile', (['app_lib_path'], {}), '(app_lib_path)\n', (14148, 14162), False, 'import os\n'), ((14730, 14760), 'os.path.join', 'os.path.join', (['space', 'graph_sig'], {}), '(space, graph_sig)\n', (14742, 14760), False, 'import os\n'), ((14788, 14818), 'os.path.isfile', 'os.path.isfile', (['graph_lib_path'], {}), '(graph_lib_path)\n', (14802, 14818), False, 'import os\n'), ((15175, 15225), 'graphscope.proto.op_def_pb2.OpDef', 'op_def_pb2.OpDef', ([], {'op': 'types_pb2.REGISTER_GRAPH_TYPE'}), '(op=types_pb2.REGISTER_GRAPH_TYPE)\n', (15191, 15225), False, 'from graphscope.proto import op_def_pb2\n'), ((15747, 15766), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (15764, 15766), False, 'from graphscope.proto import op_def_pb2\n'), ((15838, 15904), 'graphscope.proto.message_pb2.RunStepRequest', 'message_pb2.RunStepRequest', ([], {'session_id': 
'session_id', 'dag_def': 'dag_def'}), '(session_id=session_id, dag_def=dag_def)\n', (15864, 15904), False, 'from graphscope.proto import message_pb2\n'), ((28101, 28161), 'gscoordinator.utils.distribute_lib_via_hosts', 'distribute_lib_via_hosts', (['self._launcher.hosts', 'app_lib_path'], {}), '(self._launcher.hosts, app_lib_path)\n', (28125, 28161), False, 'from gscoordinator.utils import distribute_lib_via_hosts\n'), ((37739, 37957), 'gscoordinator.launcher.LocalLauncher', 'LocalLauncher', ([], {'num_workers': 'args.num_workers', 'hosts': 'args.hosts', 'vineyard_socket': 'args.vineyard_socket', 'shared_mem': 'args.k8s_vineyard_shared_mem', 'log_level': 'args.log_level', 'timeout_seconds': 'args.timeout_seconds'}), '(num_workers=args.num_workers, hosts=args.hosts,\n vineyard_socket=args.vineyard_socket, shared_mem=args.\n k8s_vineyard_shared_mem, log_level=args.log_level, timeout_seconds=args\n .timeout_seconds)\n', (37752, 37957), False, 'from gscoordinator.launcher import LocalLauncher\n'), ((7748, 7790), 'json.dumps', 'json.dumps', (['self._analytical_engine_config'], {}), '(self._analytical_engine_config)\n', (7758, 7790), False, 'import json\n'), ((12541, 12595), 'os.path.join', 'os.path.join', (['"""/tmp"""', "(response.graph_def.key + '.json')"], {}), "('/tmp', response.graph_def.key + '.json')\n", (12553, 12595), False, 'import os\n'), ((15595, 15672), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {'graph_type': 'op.attr[types_pb2.GRAPH_TYPE].graph_type'}), '(graph_type=op.attr[types_pb2.GRAPH_TYPE].graph_type)\n', (15619, 15672), False, 'from graphscope.proto import attr_value_pb2\n'), ((16625, 16651), 'sys.stdout.poll', 'sys.stdout.poll', ([], {'timeout': '(3)'}), '(timeout=3)\n', (16640, 16651), False, 'import sys\n'), ((23620, 23671), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.OK'}), '(code=error_codes_pb2.OK)\n', (23646, 23671), False, 'from graphscope.proto import message_pb2\n'), ((24057, 24108), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.OK'}), '(code=error_codes_pb2.OK)\n', (24083, 24108), False, 'from graphscope.proto import message_pb2\n'), ((24253, 24311), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'code', 'error_msg': 'error_msg'}), '(code=code, error_msg=error_msg)\n', (24279, 24311), False, 'from graphscope.proto import message_pb2\n'), ((25512, 25553), 'gscoordinator.utils.create_single_op_dag', 'create_single_op_dag', (['unload_type', 'config'], {}), '(unload_type, config)\n', (25532, 25553), False, 'from gscoordinator.utils import create_single_op_dag\n'), ((25580, 25652), 'graphscope.proto.message_pb2.RunStepRequest', 'message_pb2.RunStepRequest', ([], {'session_id': 'self._session_id', 'dag_def': 'dag_def'}), '(session_id=self._session_id, dag_def=dag_def)\n', (25606, 25652), False, 'from graphscope.proto import message_pb2\n'), ((38388, 38402), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (38400, 38402), False, 'import os\n'), ((5966, 6003), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (5979, 6003), False, 'import random\n'), ((12702, 12816), 'gscoordinator.object_manager.GraphMeta', 'GraphMeta', (['response.graph_def.key', 'response.graph_def.vineyard_id', 'response.graph_def.schema_def', 'schema_path'], {}), '(response.graph_def.key, response.graph_def.vineyard_id, response.\n 
graph_def.schema_def, schema_path)\n', (12711, 12816), False, 'from gscoordinator.object_manager import GraphMeta\n'), ((16184, 16248), 'gscoordinator.object_manager.LibMeta', 'LibMeta', (['register_response.result', '"""graph_frame"""', 'graph_lib_path'], {}), "(register_response.result, 'graph_frame', graph_lib_path)\n", (16191, 16248), False, 'from gscoordinator.object_manager import LibMeta\n'), ((19980, 20031), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.OK'}), '(code=error_codes_pb2.OK)\n', (20006, 20031), False, 'from graphscope.proto import message_pb2\n'), ((20516, 20628), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.INTERACTIVE_ENGINE_INTERNAL_ERROR', 'error_msg': 'error_message'}), '(code=error_codes_pb2.\n INTERACTIVE_ENGINE_INTERNAL_ERROR, error_msg=error_message)\n', (20542, 20628), False, 'from graphscope.proto import message_pb2\n'), ((22523, 22574), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.OK'}), '(code=error_codes_pb2.OK)\n', (22549, 22574), False, 'from graphscope.proto import message_pb2\n'), ((22982, 23094), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.INTERACTIVE_ENGINE_INTERNAL_ERROR', 'error_msg': 'error_message'}), '(code=error_codes_pb2.\n INTERACTIVE_ENGINE_INTERNAL_ERROR, error_msg=error_message)\n', (23008, 23094), False, 'from graphscope.proto import message_pb2\n'), ((13085, 13155), 'gscoordinator.utils.to_maxgraph_schema', 'to_maxgraph_schema', (['response.graph_def.schema_def.property_schema_json'], {}), '(response.graph_def.schema_def.property_schema_json)\n', (13103, 13155), False, 'from gscoordinator.utils import to_maxgraph_schema\n'), ((22041, 22214), 'graphscope.proto.message_pb2.ResponseStatus', 'message_pb2.ResponseStatus', ([], {'code': 'error_codes_pb2.INTERACTIVE_ENGINE_INTERNAL_ERROR', 'error_msg': "('Internal error during close interactive instance: %d, %s' % (400, e))"}), "(code=error_codes_pb2.\n INTERACTIVE_ENGINE_INTERNAL_ERROR, error_msg=\n 'Internal error during close interactive instance: %d, %s' % (400, e))\n", (22067, 22214), False, 'from graphscope.proto import message_pb2\n'), ((25367, 25410), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {'i': 'obj.vineyard_id'}), '(i=obj.vineyard_id)\n', (25391, 25410), False, 'from graphscope.proto import attr_value_pb2\n')]
# # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # """ Unit tests for degree assortativity coefficient. """ import pytest from graphscope import nx from graphscope.nx.tests.utils import almost_equal @pytest.mark.usefixtures("graphscope_session") class TestDegreeAssortativity: def setup_method(self): self.P4 = nx.path_graph(4) self.D = nx.DiGraph() self.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) self.W = nx.Graph() self.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5) self.W.add_edge(0, 2, weight=1) # S1 = nx.star_graph(4) # S2 = nx.star_graph(4) # cls.DS = nx.disjoint_union(S1, S2) # cls.DS.add_edge(4, 5) self.DS = nx.Graph() self.DS.add_edges_from( [(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 6), (5, 7), (5, 8), (5, 9)] ) def teardown_method(self): del self.P4 del self.D del self.W def test_degree_assortativity_undirected1(self): r = nx.builtin.degree_assortativity_coefficient(self.P4) assert almost_equal(r, -1.0 / 2, places=4) def test_degree_assortativity_undirected2(self): r = nx.builtin.degree_assortativity_coefficient(self.P4, x="in", y="in") assert almost_equal(r, -1.0 / 2, places=4) def test_degree_assortativity_undirected3(self): r = nx.builtin.degree_assortativity_coefficient(self.P4, x="in", y="out") assert almost_equal(r, -1.0 / 2, places=4) def test_degree_assortativity_undirected4(self): r = nx.builtin.degree_assortativity_coefficient(self.P4, x="out", y="out") assert almost_equal(r, -1.0 / 2, places=4) def test_degree_assortativity_directed1(self): r = nx.builtin.degree_assortativity_coefficient(self.D) assert almost_equal(r, -0.57735, places=4) def test_degree_assortativity_directed2(self): r = nx.builtin.degree_assortativity_coefficient(self.D, x="in", y="in") assert almost_equal(r, 0.33333, places=4) def test_degree_assortativity_directed3(self): r = nx.builtin.degree_assortativity_coefficient(self.D, x="in", y="out") assert almost_equal(r, -0.33333, places=4) def test_degree_assortativity_directed4(self): r = nx.builtin.degree_assortativity_coefficient(self.D, x="out", y="out") assert almost_equal(r, 0.57735, places=4) def test_degree_assortativity_weighted(self): r = nx.builtin.degree_assortativity_coefficient(self.W, weight="weight") assert almost_equal(r, -0.1429, places=4) def test_degree_assortativity_double_star(self): r = nx.builtin.degree_assortativity_coefficient(self.DS) assert almost_equal(r, -0.9339, places=4)
[ "graphscope.nx.builtin.degree_assortativity_coefficient", "graphscope.nx.tests.utils.almost_equal", "graphscope.nx.DiGraph", "pytest.mark.usefixtures", "graphscope.nx.path_graph", "graphscope.nx.Graph" ]
[((508, 553), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (531, 553), False, 'import pytest\n'), ((631, 647), 'graphscope.nx.path_graph', 'nx.path_graph', (['(4)'], {}), '(4)\n', (644, 647), False, 'from graphscope import nx\n'), ((665, 677), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (675, 677), False, 'from graphscope import nx\n'), ((759, 769), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (767, 769), False, 'from graphscope import nx\n'), ((1037, 1047), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1045, 1047), False, 'from graphscope import nx\n'), ((1331, 1383), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.P4'], {}), '(self.P4)\n', (1374, 1383), False, 'from graphscope import nx\n'), ((1399, 1434), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-1.0 / 2)'], {'places': '(4)'}), '(r, -1.0 / 2, places=4)\n', (1411, 1434), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((1501, 1569), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.P4'], {'x': '"""in"""', 'y': '"""in"""'}), "(self.P4, x='in', y='in')\n", (1544, 1569), False, 'from graphscope import nx\n'), ((1585, 1620), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-1.0 / 2)'], {'places': '(4)'}), '(r, -1.0 / 2, places=4)\n', (1597, 1620), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((1687, 1756), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.P4'], {'x': '"""in"""', 'y': '"""out"""'}), "(self.P4, x='in', y='out')\n", (1730, 1756), False, 'from graphscope import nx\n'), ((1772, 1807), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-1.0 / 2)'], {'places': '(4)'}), '(r, -1.0 / 2, places=4)\n', (1784, 1807), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((1874, 1944), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.P4'], {'x': '"""out"""', 'y': '"""out"""'}), "(self.P4, x='out', y='out')\n", (1917, 1944), False, 'from graphscope import nx\n'), ((1960, 1995), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-1.0 / 2)'], {'places': '(4)'}), '(r, -1.0 / 2, places=4)\n', (1972, 1995), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((2060, 2111), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.D'], {}), '(self.D)\n', (2103, 2111), False, 'from graphscope import nx\n'), ((2127, 2162), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-0.57735)'], {'places': '(4)'}), '(r, -0.57735, places=4)\n', (2139, 2162), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((2227, 2294), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.D'], {'x': '"""in"""', 'y': '"""in"""'}), "(self.D, x='in', y='in')\n", (2270, 2294), False, 'from graphscope import nx\n'), ((2310, 2344), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(0.33333)'], {'places': '(4)'}), '(r, 0.33333, places=4)\n', (2322, 2344), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((2409, 2477), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', 
(['self.D'], {'x': '"""in"""', 'y': '"""out"""'}), "(self.D, x='in', y='out')\n", (2452, 2477), False, 'from graphscope import nx\n'), ((2493, 2528), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-0.33333)'], {'places': '(4)'}), '(r, -0.33333, places=4)\n', (2505, 2528), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((2593, 2662), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.D'], {'x': '"""out"""', 'y': '"""out"""'}), "(self.D, x='out', y='out')\n", (2636, 2662), False, 'from graphscope import nx\n'), ((2678, 2712), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(0.57735)'], {'places': '(4)'}), '(r, 0.57735, places=4)\n', (2690, 2712), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((2776, 2844), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.W'], {'weight': '"""weight"""'}), "(self.W, weight='weight')\n", (2819, 2844), False, 'from graphscope import nx\n'), ((2860, 2894), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-0.1429)'], {'places': '(4)'}), '(r, -0.1429, places=4)\n', (2872, 2894), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((2961, 3013), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.DS'], {}), '(self.DS)\n', (3004, 3013), False, 'from graphscope import nx\n'), ((3029, 3063), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r', '(-0.9339)'], {'places': '(4)'}), '(r, -0.9339, places=4)\n', (3041, 3063), False, 'from graphscope.nx.tests.utils import almost_equal\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # from graphscope.framework.app import AppAssets from graphscope.framework.app import not_compatible_for from graphscope.framework.app import project_to_simple __all__ = ["average_shortest_path_length"] @project_to_simple @not_compatible_for("arrow_property", "dynamic_property", "arrow_flattened") def average_shortest_path_length(G): r"""Returns the average shortest path length. The average shortest path length is .. math:: a =\sum_{s,t \in V} \frac{d(s, t)}{n(n-1)} where `V` is the set of nodes in `G`, `d(s, t)` is the shortest path from `s` to `t`, and `n` is the number of nodes in `G`. Parameters ---------- G : graph """ ctx = AppAssets(algo="sssp_average_length", context="tensor")(G) return ctx.to_numpy("r", axis=0)[0]
[ "graphscope.framework.app.not_compatible_for", "graphscope.framework.app.AppAssets" ]
[((894, 969), 'graphscope.framework.app.not_compatible_for', 'not_compatible_for', (['"""arrow_property"""', '"""dynamic_property"""', '"""arrow_flattened"""'], {}), "('arrow_property', 'dynamic_property', 'arrow_flattened')\n", (912, 969), False, 'from graphscope.framework.app import not_compatible_for\n'), ((1367, 1422), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp_average_length"""', 'context': '"""tensor"""'}), "(algo='sssp_average_length', context='tensor')\n", (1376, 1422), False, 'from graphscope.framework.app import AppAssets\n')]
import networkx.algorithms.traversal.tests.test_edgedfs import pytest from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx(networkx.algorithms.traversal.tests.test_edgedfs, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.traversal.tests.test_edgedfs import TestEdgeDFS @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestEdgeDFS) class TestEdgeDFS(): @pytest.mark.skip(reason="not support multigraph") def test_multigraph(self): pass @pytest.mark.skip(reason="not support multigraph") def test_multidigraph(self): pass @pytest.mark.skip(reason="not support multigraph") def test_multidigraph_rev(self): pass @pytest.mark.skip(reason="not support multigraph") def test_multidigraph_ignore(self): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((434, 479), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (457, 479), False, 'import pytest\n'), ((481, 520), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestEdgeDFS'], {}), '(TestEdgeDFS)\n', (507, 520), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((547, 596), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (563, 596), False, 'import pytest\n'), ((647, 696), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (663, 696), False, 'import pytest\n'), ((749, 798), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (765, 798), False, 'import pytest\n'), ((855, 904), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (871, 904), False, 'import pytest\n'), ((310, 355), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (333, 355), False, 'import pytest\n')]
import networkx.algorithms.link_analysis.tests.test_hits import networkx.algorithms.link_analysis.tests.test_pagerank import pytest from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx(networkx.algorithms.link_analysis.tests.test_hits, decorators=pytest.mark.usefixtures("graphscope_session")) import_as_graphscope_nx(networkx.algorithms.link_analysis.tests.test_pagerank, decorators=pytest.mark.usefixtures("graphscope_session")) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestHITS) class TestHITS: @pytest.mark.skip(reason="builtin app not support raise PowerIterationFailedConvergence") def test_hits_not_convergent(self): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((585, 630), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (608, 630), False, 'import pytest\n'), ((632, 668), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestHITS'], {}), '(TestHITS)\n', (658, 668), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((690, 783), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""builtin app not support raise PowerIterationFailedConvergence"""'}), "(reason=\n 'builtin app not support raise PowerIterationFailedConvergence')\n", (706, 783), False, 'import pytest\n'), ((373, 418), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (396, 418), False, 'import pytest\n'), ((535, 580), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (558, 580), False, 'import pytest\n')]
# -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import functools import inspect import json import networkx.algorithms as nxa from networkx.utils.decorators import not_implemented_for import graphscope from graphscope import nx from graphscope.framework.app import AppAssets from graphscope.framework.errors import InvalidArgumentError from graphscope.nx.utils.compat import patch_docstring from graphscope.proto import graph_def_pb2 # decorator function def project_to_simple(func): @functools.wraps(func) def wrapper(*args, **kwargs): graph = args[0] if not hasattr(graph, "graph_type"): raise InvalidArgumentError("Missing graph_type attribute in graph object.") elif graph.graph_type in ( graph_def_pb2.DYNAMIC_PROPERTY, graph_def_pb2.ARROW_PROPERTY, ): if ( "weight" in inspect.getfullargspec(func)[0] ): # func has 'weight' argument weight = kwargs.get("weight", None) try: e_label = graph.schema.vertex_labels[0] graph.schema.get_edge_property_id(e_label, weight) except KeyError: weight = None graph = graph._project_to_simple(e_prop=weight) elif "attribute" in inspect.getfullargspec(func)[0]: attribute = kwargs.get("attribute", None) graph = graph._project_to_simple(v_prop=attribute) else: graph = graph._project_to_simple() return func(graph, *args[1:], **kwargs) return wrapper def context_to_dict(func): @functools.wraps(func) def wrapper(*args, **kwargs): ctx = func(*args, **kwargs) graph = args[0] if graph.graph_type == graph_def_pb2.ARROW_FLATTENED: d = dict() df = ctx.to_dataframe( {"label_id": "v.label_id", "id": "v.id", "value": "r"} ) vertex_labels = graph.schema.vertex_labels for row in df.itertuples(): if row.label_id != graph._default_label_id: d[(vertex_labels[row.label_id], row.id)] = row.value else: d[row.id] = row.value return d return ( ctx.to_dataframe({"id": "v.id", "value": "r"}) .set_index("id")["value"] .to_dict() ) return wrapper @context_to_dict @project_to_simple @not_implemented_for("multigraph") def pagerank(G, alpha=0.85, max_iter=100, tol=1.0e-6, weight="weight"): """Returns the PageRank of the nodes in the graph. PageRank computes a ranking of the nodes in the graph G based on the structure of the incoming links. It was originally designed as an algorithm to rank web pages. Parameters ---------- G : graph A networkx directed graph. alpha : float, optional Damping parameter for PageRank, default=0.85. max_iter : integer, optional Maximum number of iterations in power method eigenvalue solver. tol : float, optional Error tolerance used to check convergence in power method solver. Returns ------- pagerank : dataframe Dataframe of nodes with PageRank as the value. Examples -------- >>> G = nx.DiGraph(nx.path_graph(4)) >>> pr = nx.pagerank(G, alpha=0.9) Notes ----- The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after an error tolerance of ``len(G) * tol`` has been reached. If the number of iterations exceed `max_iter`, computation just complete and return the current result. The PageRank algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed. References ---------- .. 
[1] <NAME> and <NAME>, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] <NAME>; <NAME>; <NAME> and <NAME>, The PageRank citation ranking: Bringing order to the Web. 1999 http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf """ return graphscope.pagerank_nx(G, alpha, max_iter, tol) @not_implemented_for("multigraph") @patch_docstring(nxa.hits) def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): # TODO(@weibin): raise PowerIterationFailedConvergence if hits fails to converge # within the specified number of iterations. @project_to_simple def _hits(G, max_iter=100, tol=1.0e-8, normalized=True): ctx = graphscope.hits( G, tolerance=tol, max_round=max_iter, normalized=normalized ) df = ctx.to_dataframe({"id": "v.id", "auth": "r.auth", "hub": "r.hub"}) return ( df.set_index("id")["hub"].to_dict(), df.set_index("id")["auth"].to_dict(), ) if nstart is not None: # forward return nxa.hits(G, max_iter, tol, nstart, normalized) if max_iter == 0: raise nx.PowerIterationFailedConvergence(max_iter) if len(G) == 0: return {}, {} return _hits(G, max_iter, tol, normalized) def hits_scipy(G, max_iter=100, tol=1.0e-8, normalized=True): return hits(G, max_iter=max_iter, tol=tol, normalized=normalized) @context_to_dict @project_to_simple @patch_docstring(nxa.degree_centrality) def degree_centrality(G): return graphscope.degree_centrality(G, centrality_type="both") @context_to_dict @project_to_simple @not_implemented_for("undirected") @patch_docstring(nxa.in_degree_centrality) def in_degree_centrality(G): return graphscope.degree_centrality(G, centrality_type="in") @context_to_dict @project_to_simple @not_implemented_for("undirected") @patch_docstring(nxa.out_degree_centrality) def out_degree_centrality(G): return graphscope.degree_centrality(G, centrality_type="out") @not_implemented_for("multigraph") @patch_docstring(nxa.eigenvector_centrality) def eigenvector_centrality(G, max_iter=100, tol=1e-06, nstart=None, weight=None): # TODO(@weibin): raise PowerIterationFailedConvergence if eigenvector fails to converge # within the specified number of iterations. @context_to_dict @project_to_simple def _eigenvector_centrality(G, max_iter=100, tol=1e-06, weight=None): return graphscope.eigenvector_centrality(G, tolerance=tol, max_round=max_iter) if nstart is not None: # forward the nxa.eigenvector_centrality return nxa.eigenvector_centrality(G, max_iter, tol, nstart, weight) if len(G) == 0: raise nx.NetworkXPointlessConcept( "cannot compute centrality for the null graph" ) if max_iter == 0: raise nx.PowerIterationFailedConvergence(max_iter) return _eigenvector_centrality(G, max_iter=max_iter, tol=tol, weight=weight) @not_implemented_for("multigraph") @patch_docstring(nxa.katz_centrality) def katz_centrality( G, alpha=0.1, beta=1.0, max_iter=100, tol=1e-06, nstart=None, normalized=True, weight=None, ): # TODO(@weibin): raise PowerIterationFailedConvergence if katz fails to converge # within the specified number of iterations. 
@context_to_dict @project_to_simple def _katz_centrality( G, alpha=0.1, beta=1.0, max_iter=100, tol=1e-06, normalized=True, weight=None, ): return graphscope.katz_centrality( G, alpha=alpha, beta=beta, tolerance=tol, max_round=max_iter, normalized=normalized, ) if nstart is not None or isinstance(beta, dict): # forward the nxa.katz_centrality return nxa.katz_centrality( G, alpha, beta, max_iter, tol, nstart, normalized, weight ) if len(G) == 0: return {} if not isinstance(beta, (int, float)): raise nx.NetworkXError("beta should be number, not {}".format(type(beta))) if max_iter == 0: raise nx.PowerIterationFailedConvergence(max_iter) return _katz_centrality( G, alpha=alpha, beta=beta, tol=tol, max_iter=max_iter, normalized=normalized, weight=weight, ) @project_to_simple @patch_docstring(nxa.has_path) def has_path(G, source, target): ctx = AppAssets(algo="sssp_has_path", context="tensor")(G, source, target) return ctx.to_numpy("r", axis=0)[0] @project_to_simple @patch_docstring(nxa.shortest_path) def shortest_path(G, source=None, target=None, weight=None): return AppAssets(algo="sssp_path", context="tensor")(G, source) @context_to_dict @project_to_simple def single_source_dijkstra_path_length(G, source, weight=None): """Find shortest weighted path lengths in G from a source node. Compute the shortest path length between source and all other reachable nodes for a weighted graph. Parameters ---------- G : networkx graph source : node label Starting node for path weight : string the edge weights will be accessed via the edge attribute with this key (that is, the weight of the edge joining `u` to `v` will be ``G.edges[u, v][weight]``). Returns ------- length : dataframe Dataframe by node to shortest path length from source. Examples -------- >>> G = nx.path_graph(5) >>> length = nx.single_source_dijkstra_path_length(G, 0) Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. """ return AppAssets(algo="sssp_projected", context="vertex_data")(G, source) @patch_docstring(nxa.average_shortest_path_length) def average_shortest_path_length(G, weight=None, method=None): @project_to_simple def _average_shortest_path_length(G, weight=None): return graphscope.average_shortest_path_length(G) if method is not None: return nxa.average_shortest_path_length(G, weight, method) n = len(G) # For the specail case of the null graph. raise an exception, since # there are no paths in the null graph. if n == 0: msg = ( "the null graph has no paths, thus there is no average" "shortest path length" ) raise nx.NetworkXPointlessConcept(msg) # For the special case of the trivial graph, return zero immediately. if n == 1: return 0 return _average_shortest_path_length(G, weight=weight) @project_to_simple def bfs_edges(G, source, depth_limit=None): """edges in a breadth-first-search starting at source. Parameters ---------- G : networkx graph source : node Specify starting node for breadth-first search; this function iterates over only those edges in the component reachable from this node. depth_limit : int, optional(default=len(G)) Specify the maximum search depth Returns ------- edges: list A list of edges in the breadth-first-search. Examples -------- To get the edges in a breadth-first search:: >>> G = nx.path_graph(3) >>> list(nx.bfs_edges(G, 0)) [(0, 1), (1, 2)] >>> list(nx.bfs_edges(G, source=0, depth_limit=1)) [(0, 1)] """ # FIXME: reverse not support. 
ctx = AppAssets(algo="bfs_generic", context="tensor")( G, source, depth_limit, format="edges" ) return ctx.to_numpy("r", axis=0).tolist() @project_to_simple @patch_docstring(nxa.bfs_predecessors) def bfs_predecessors(G, source, depth_limit=None): return AppAssets(algo="bfs_generic", context="tensor")( G, source, depth_limit, format="predecessors" ) @project_to_simple @patch_docstring(nxa.bfs_successors) def bfs_successors(G, source, depth_limit=None): return AppAssets(algo="bfs_generic", context="tensor")( G, source, depth_limit, format="successors" ) @project_to_simple def all_pairs_shortest_path_length(G, weight=None): """Compute shortest path lengths between all nodes in a graph. Parameters ---------- G : networkx graph weight : string (defualt=None) edge weights will be accessed via the edge attribute with this key (that is, the weight of the edge joining `u` to `v` will be ``G.edges[u, v][weight]``). If is None, every edge is assume to be one. Returns ------- :class:`DynamicVertexDataContext`: A context with each vertex assigned with the shortest distance. One can use the context to access node's distance result or iterate by nodes. Examples -------- >>> G = nx.path_graph(5) >>> length = dict(nx.all_pairs_dijkstra_path_length(G)) >>> for node in [0, 1, 2, 3, 4]: ... print(f"1 - {node}: {length[1][node]}") 1 - 0: 1 1 - 1: 0 1 - 2: 1 1 - 3: 2 1 - 4: 3 >>> length[3][2] 1 >>> length[2][2] 0 Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. """ return AppAssets(algo="all_pairs_shortest_path_length", context="vertex_data")(G) @patch_docstring(nxa.closeness_centrality) def closeness_centrality(G, u=None, distance=None, wf_improved=True): @context_to_dict @project_to_simple def _closeness_centrality(G, weight=None, wf_improved=True): return AppAssets(algo="closeness_centrality", context="vertex_data")( G, wf_improved ) if u is not None: # forward return nxa.closeness_centrality(G, u, distance, wf_improved) return _closeness_centrality(G, weight=distance, wf_improved=wf_improved) @patch_docstring(nxa.bfs_tree) def bfs_tree(G, source, reverse=False, depth_limit=None): """Returns an oriented tree constructed from of a breadth-first-search starting at source. Parameters ---------- G : networkx graph source : node Specify starting node for breadth-first search depth_limit : int, optional(default=len(G)) Specify the maximum search depth Returns ------- T: networkx DiGraph An oriented tree Notes ----- Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py by <NAME>, July 2004. The modifications to allow depth limits based on the Wikipedia article "`Depth-limited-search`_". .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search """ T = nx.DiGraph() T.add_node(source) edges_gen = bfs_edges(G, source, depth_limit=depth_limit) T.add_edges_from(edges_gen) return T @project_to_simple def k_core(G, k=None, core_number=None): """Returns the k-core of G. A k-core is a maximal subgraph that contains nodes of degree k or more. Parameters ---------- G : networkx graph A graph or directed graph k : int, optional The order of the core. If not specified return the main core. Returns ------- :class:`VertexDataContext`: A context with each vertex assigned with a boolean: 1 if the vertex satisfies k-core, otherwise 0. References ---------- .. [1] An O(m) Algorithm for Cores Decomposition of Networks <NAME> and <NAME>, 2003. https://arxiv.org/abs/cs.DS/0310049 """ # FIXME: core number not support. 
return graphscope.k_core(G, k) @context_to_dict @project_to_simple def clustering(G): r"""Compute the clustering coefficient for nodes. For unweighted graphs, the clustering of a node :math:`u` is the fraction of possible triangles through that node that exist, .. math:: c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)}, where :math:`T(u)` is the number of triangles through node :math:`u` and :math:`deg(u)` is the degree of :math:`u`. For weighted graphs, there are several ways to define clustering [1]_. the one used here is defined as the geometric average of the subgraph edge weights [2]_, .. math:: c_u = \frac{1}{deg(u)(deg(u)-1))} \sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}. The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`. The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`. For directed graphs, the clustering is similarly defined as the fraction of all possible directed triangles or geometric average of the subgraph edge weights for unweighted and weighted directed graph respectively [3]_. .. math:: c_u = \frac{1}{deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u)} T(u), where :math:`T(u)` is the number of directed triangles through node :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of :math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of :math:`u`. Parameters ---------- G : graph Returns ------- out : dataframe Clustering coefficient at nodes Examples -------- >>> G = nx.path_graph(5) >>>nx.clustering(G) References ---------- .. [1] Generalizations of the clustering coefficient to weighted complex networks by J. Saramäki, <NAME>, <NAME>, <NAME>, and <NAME>, Physical Review E, 75 027105 (2007). http://jponnela.com/web_documents/a9.pdf .. [2] Intensity and coherence of motifs in weighted complex networks by <NAME>, <NAME>, <NAME>, and <NAME>, Physical Review E, 71(6), 065103 (2005). .. [3] Clustering in complex directed networks by <NAME>, Physical Review E, 76(2), 026107 (2007). """ # FIXME(weibin): clustering now only correct in directed graph. # FIXME: nodes and weight not support. return graphscope.clustering(G) @context_to_dict @project_to_simple def triangles(G, nodes=None): """Compute the number of triangles. Finds the number of triangles that include a node as one vertex. Parameters ---------- G : graph A networkx graph Returns ------- out : dataframe Number of triangles keyed by node label. Notes ----- When computing triangles for the entire graph each triangle is counted three times, once at each node. Self loops are ignored. """ # FIXME: nodes not support. return graphscope.triangles(G) @project_to_simple @patch_docstring(nxa.transitivity) def transitivity(G): # FIXME: nodes not support. return AppAssets(algo="transitivity", context="tensor")(G) @project_to_simple @patch_docstring(nxa.average_clustering) def average_clustering(G, nodes=None, count_zeros=True): """Compute the average clustering coefficient for the graph G. The clustering coefficient for the graph is the average, .. math:: C = \frac{1}{n}\sum_{v \in G} c_v, where :math:`n` is the number of nodes in `G`. Parameters ---------- G : graph Returns ------- avg : float Average clustering Examples -------- >>> G = nx.complete_graph(5) >>> print(nx.average_clustering(G)) 1.0 Notes ----- This is a space saving routine; it might be faster to use the clustering function to get a list and then take the average. Self loops are ignored. References ---------- .. 
[1] Generalizations of the clustering coefficient to weighted complex networks by <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>, Physical Review E, 75 027105 (2007). http://jponnela.com/web_documents/a9.pdf .. [2] <NAME>, Mean clustering coefficients: the role of isolated nodes and leafs on clustering measures for small-world networks. https://arxiv.org/abs/0802.2512 """ # FIXME: nodes, weight, count_zeros not support. ctx = AppAssets(algo="avg_clustering", context="tensor")(G) return ctx.to_numpy("r")[0] @context_to_dict @project_to_simple def weakly_connected_components(G): """Generate weakly connected components of G. Parameters ---------- G : networkx graph A directed graph Returns ------- comp :class:`VertexDataContext`: A context with each vertex assigned with a boolean: 1 if the vertex satisfies k-core, otherwise 0. """ return AppAssets(algo="wcc_projected", context="vertex_data")(G) @project_to_simple def degree_assortativity_coefficient(G, x="out", y="in", weight=None): """Compute degree assortativity of graph. Assortativity measures the similarity of connections in the graph with respect to the node degree. Parameters ---------- G : NetworkX graph x: string ('in','out') The degree type for source node (directed graphs only). y: string ('in','out') The degree type for target node (directed graphs only). weighted: bool (True, False) weighted graph or unweighted graph Returns ------- r : float Assortativity of graph by degree. Examples -------- >>> G = nx.path_graph(4) >>> r = nx.builtin.degree_assortativity_coefficient(G) >>> print(f"{r:3.1f}") -0.5 See Also -------- attribute_assortativity_coefficient Notes ----- This computes Eq. (21) in Ref. [1]_ , where e is the joint probability distribution (mixing matrix) of the degrees. If G is directed than the matrix e is the joint probability of the user-specified degree type for the source and target. References ---------- .. [1] <NAME>, Mixing patterns in networks, Physical Review E, 67 026126, 2003 .. [2] <NAME>., <NAME>., <NAME>. & <NAME>. Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). """ return graphscope.degree_assortativity_coefficient(G, x, y, weight) @patch_docstring(nxa.node_boundary) def node_boundary(G, nbunch1, nbunch2=None): @project_to_simple def _node_boundary(G, nbunch1, nbunch2=None): n1json = json.dumps(list(nbunch1)) if nbunch2 is not None: n2json = json.dumps(list(nbunch2)) else: n2json = "" ctx = AppAssets(algo="node_boundary", context="tensor")(G, n1json, n2json) return set(ctx.to_numpy("r", axis=0).tolist()) if G.is_multigraph(): # forward to the NetworkX node_boundary return nxa.node_boundary(G, nbunch1, nbunch2) return _node_boundary(G, nbunch1, nbunch2) @patch_docstring(nxa.edge_boundary) def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): @project_to_simple def _boundary(G, nbunch1, nbunch2=None): n1json = json.dumps(list(nbunch1)) if nbunch2: n2json = json.dumps(list(nbunch2)) else: n2json = "" ctx = AppAssets(algo="edge_boundary", context="tensor")(G, n1json, n2json) ret = ctx.to_numpy("r", axis=0).tolist() for e in ret: yield (e[0], e[1]) if G.is_multigraph(): # forward the NetworkX edge boundary return nxa.edge_boundary(G, nbunch1, nbunch2, data, keys, default) return _boundary(G, nbunch1, nbunch2) @project_to_simple def average_degree_connectivity(G, source="in+out", target="in+out", weight=None): """Compute the average degree connectivity of graph. The average degree connectivity is the average nearest neighbor degree of nodes with degree k. 
For weighted graphs, an analogous measure can be computed using the weighted average neighbors degree defined in [1]_, for a node `i`, as .. math:: k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j where `s_i` is the weighted degree of node `i`, `w_{ij}` is the weight of the edge that links `i` and `j`, and `N(i)` are the neighbors of node `i`. Parameters ---------- G : NetworkX graph source : "in"|"out"|"in+out" (default:"in+out") Directed graphs only. Use "in"- or "out"-degree for source node. target : "in"|"out"|"in+out" (default:"in+out" Directed graphs only. Use "in"- or "out"-degree for target node. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. Returns ------- d : dict A dictionary keyed by degree k with the value of average connectivity. Raises ------ ValueError If either `source` or `target` are not one of 'in', 'out', or 'in+out'. Examples -------- >>> G = nx.Graph() >>> G.add_edge(1, 2, weight=3) >>> G.add_edges_from([(0, 1), (2, 3)], weight=1) >>> nx.builtin.average_degree_connectivity(G) {1: 2.0, 2: 1.5} >>> nx.builtin.average_degree_connectivity(G, weight="weight") {1: 2.0, 2: 1.75} References ---------- .. [1] <NAME>, <NAME>, <NAME>, and <NAME>, "The architecture of complex weighted networks". PNAS 101 (11): 3747–3752 (2004). """ return graphscope.average_degree_connectivity(G, source, target, weight) @project_to_simple def attribute_assortativity_coefficient(G, attribute): """Compute assortativity for node attributes. Assortativity measures the similarity of connections in the graph with respect to the given attribute. Parameters ---------- G : NetworkX graph attribute : string Node attribute key Returns ------- r: float Assortativity of graph for given attribute Examples -------- >>> G = nx.Graph() >>> G.add_nodes_from([0, 1], color="red") >>> G.add_nodes_from([2, 3], color="blue") >>> G.add_edges_from([(0, 1), (2, 3)]) >>> print(nx.builtin.attribute_assortativity_coefficient(G, "color")) 1.0 Notes ----- This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)), where M is the joint probability distribution (mixing matrix) of the specified attribute. References ---------- .. [1] <NAME>, Mixing patterns in networks, Physical Review E, 67 026126, 2003 """ return graphscope.attribute_assortativity_coefficient(G) @project_to_simple def numeric_assortativity_coefficient(G, attribute): """Compute assortativity for numerical node attributes. Assortativity measures the similarity of connections in the graph with respect to the given numeric attribute. Parameters ---------- G : NetworkX graph attribute : string Node attribute key. Returns ------- r: float Assortativity of graph for given attribute Examples -------- >>> G = nx.Graph() >>> G.add_nodes_from([0, 1], size=2) >>> G.add_nodes_from([2, 3], size=3) >>> G.add_edges_from([(0, 1), (2, 3)]) >>> print(nx.builtin.numeric_assortativity_coefficient(G, "size")) 1.0 Notes ----- This computes Eq. (21) in Ref. [1]_ , for the mixing matrix of the specified attribute. References ---------- .. 
[1] <NAME>, Mixing patterns in networks Physical Review E, 67 026126, 2003 """ return graphscope.numeric_assortativity_coefficient(G) @patch_docstring(nxa.is_simple_path) def is_simple_path(G, nodes): @project_to_simple def _is_simple_path(G, nodes): return graphscope.is_simple_path(G, nodes) if G.is_multigraph(): # forward the networkx.is_simple_graph return nxa.is_simple_path(G, nodes) return _is_simple_path(G, nodes) def get_all_simple_paths(G, source, target_nodes, cutoff): @project_to_simple def _all_simple_paths(G, source, target_nodes, cutoff): targets_json = json.dumps(target_nodes) return AppAssets(algo="all_simple_paths", context="tensor")( G, source, targets_json, cutoff ) if not isinstance(target_nodes, list): target_nodes = [target_nodes] if source not in G or len(target_nodes) != len(list(G.nbunch_iter(target_nodes))): raise ValueError("nx.NodeNotFound") if cutoff is None: cutoff = len(G) - 1 if cutoff < 1 or source in target_nodes: return [] ctx = _all_simple_paths(G, source, list(set(target_nodes)), cutoff) paths = ctx.to_numpy("r", axis=0).tolist() if len(paths) == 1: if not isinstance(paths[0], list): return [] return paths def all_simple_paths(G, source, target_nodes, cutoff=None): """Generate all simple paths in the graph G from source to target. A simple path is a path with no repeated nodes. Parameters ---------- G : NetworkX graph source : node Starting node for path target : nodes Single node or iterable of nodes at which to end path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- paths: list A list that produces lists of simple paths. If there are no paths between the source and target within the given cutoff the list is empty. Examples -------- >>> G = nx.complete_graph(4) >>> print(nx.builtin.all_simple_paths(G, 0, 3)) ... [0, 1, 2, 3] [0, 1, 3] [0, 2, 1, 3] [0, 2, 3] [0, 3] """ paths = get_all_simple_paths(G, source, target_nodes, cutoff) # delte path tail padding for path in paths: for i in range(len(path) - 1, -1, -1): if path[i] == -1: path.pop(i) else: break return paths def all_simple_edge_paths(G, source, target_nodes, cutoff=None): """Generate lists of edges for all simple paths in G from source to target. A simple path is a path with no repeated nodes. Parameters ---------- G : NetworkX graph source : node Starting node for path target : nodes Single node or iterable of nodes at which to end path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- paths: list A list that produces lists of simple edge paths. If there are no paths between the source and target within the given cutoff the list is empty. Examples -------- Print the simple path edges of a Graph:: >>> g = nx.Graph([(1, 2), (2, 4), (1, 3), (3, 4)]) >>> print(nx.builtin.all_simple_paths(G, 1, 4)) [(1, 2), (2, 4)] [(1, 3), (3, 4)] """ paths = get_all_simple_paths(G, source, target_nodes, cutoff) for path in paths: a = "" b = "" for i in range(len(path) - 1, -1, -1): if path[i] == -1: a = path.pop(i) else: b = path.pop(i) if a != -1 and a != "": path.insert(i, (b, a)) a = b return paths def betweenness_centrality( G, k=None, normalized=True, weight=None, endpoints=False, seed=None ): r"""Compute the shortest-path betweenness centrality for nodes. Betweenness centrality of a node $v$ is the sum of the fraction of all-pairs shortest paths that pass through $v$ .. 
math:: c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)} where $V$ is the set of nodes, $\sigma(s, t)$ is the number of shortest $(s, t)$-paths, and $\sigma(s, t|v)$ is the number of those paths passing through some node $v$ other than $s, t$. If $s = t$, $\sigma(s, t) = 1$, and if $v \in {s, t}$, $\sigma(s, t|v) = 0$ [2]_. Parameters ---------- G : graph A NetworkX graph. normalized : bool, optional If True the betweenness values are normalized by `2/((n-1)(n-2))` for graphs, and `1/((n-1)(n-2))` for directed graphs where `n` is the number of nodes in G. weight : None or string, optional (default=None) If None, all edge weights are considered equal. Otherwise holds the name of the edge attribute used as weight. Weights are used to calculate weighted shortest paths, so they are interpreted as distances. endpoints : bool, optional If True include the endpoints in the shortest path counts. Returns ------- nodes : dictionary Dictionary of nodes with betweenness centrality as the value. See Also -------- edge_betweenness_centrality load_centrality Notes ----- The algorithm is from <NAME> [1]_. See [4]_ for the original first published version and [2]_ for details on algorithms for variations and related metrics. For approximate betweenness calculations set k=#samples to use k nodes ("pivots") to estimate the betweenness values. For an estimate of the number of pivots needed see [3]_. For weighted graphs the edge weights must be greater than zero. Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. The total number of paths between source and target is counted differently for directed and undirected graphs. Directed paths are easy to count. Undirected paths are tricky: should a path from "u" to "v" count as 1 undirected path or as 2 directed paths? For betweenness_centrality we report the number of undirected paths when G is undirected. For betweenness_centrality_subset the reporting is different. If the source and target subsets are the same, then we want to count undirected paths. But if the source and target subsets differ -- for example, if sources is {0} and targets is {1}, then we are only counting the paths in one direction. They are undirected paths but we are counting them in a directed way. To count them as undirected paths, each should count as half a path. References ---------- .. [1] <NAME>: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001. https://doi.org/10.1080/0022250X.2001.9990249 .. [2] <NAME>: On Variants of Shortest-Path Betweenness Centrality and their Generic Computation. Social Networks 30(2):136-145, 2008. https://doi.org/10.1016/j.socnet.2007.11.001 .. [3] <NAME> and <NAME>: Centrality Estimation in Large Networks. International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007. https://dx.doi.org/10.1142/S0218127407018403 .. [4] <NAME>: A set of measures of centrality based on betweenness. 
Sociometry 40: 35–41, 1977 https://doi.org/10.2307/3033543 """ @context_to_dict @project_to_simple def _betweenness_centrality( G, k=None, normalized=True, weight=None, endpoints=False, seed=None ): algorithm = "betweenness_centrality" if weight is not None: algorithm = "betweenness_centrality_generic" return AppAssets(algo=algorithm, context="vertex_data")( G, normalized=normalized, endpoints=endpoints ) if not isinstance(G, nx.Graph): return nxa.betweenness_centrality(G, k, normalized, weight, endpoints, seed) return _betweenness_centrality( G, k=k, normalized=normalized, weight=weight, endpoints=endpoints, seed=seed )
[ "networkx.utils.decorators.not_implemented_for", "graphscope.average_degree_connectivity", "graphscope.clustering", "graphscope.pagerank_nx", "graphscope.katz_centrality", "inspect.getfullargspec", "networkx.algorithms.eigenvector_centrality", "graphscope.attribute_assortativity_coefficient", "graphscope.k_core", "graphscope.framework.errors.InvalidArgumentError", "networkx.algorithms.hits", "networkx.algorithms.katz_centrality", "json.dumps", "functools.wraps", "graphscope.framework.app.AppAssets", "networkx.algorithms.betweenness_centrality", "networkx.algorithms.closeness_centrality", "networkx.algorithms.node_boundary", "graphscope.eigenvector_centrality", "graphscope.nx.DiGraph", "networkx.algorithms.average_shortest_path_length", "graphscope.is_simple_path", "graphscope.nx.NetworkXPointlessConcept", "graphscope.nx.PowerIterationFailedConvergence", "graphscope.degree_assortativity_coefficient", "networkx.algorithms.is_simple_path", "graphscope.average_shortest_path_length", "graphscope.nx.utils.compat.patch_docstring", "graphscope.triangles", "networkx.algorithms.edge_boundary", "graphscope.hits", "graphscope.numeric_assortativity_coefficient", "graphscope.degree_centrality" ]
[((2825, 2858), 'networkx.utils.decorators.not_implemented_for', 'not_implemented_for', (['"""multigraph"""'], {}), "('multigraph')\n", (2844, 2858), False, 'from networkx.utils.decorators import not_implemented_for\n'), ((4660, 4693), 'networkx.utils.decorators.not_implemented_for', 'not_implemented_for', (['"""multigraph"""'], {}), "('multigraph')\n", (4679, 4693), False, 'from networkx.utils.decorators import not_implemented_for\n'), ((4695, 4720), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.hits'], {}), '(nxa.hits)\n', (4710, 4720), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((5778, 5816), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.degree_centrality'], {}), '(nxa.degree_centrality)\n', (5793, 5816), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((5949, 5982), 'networkx.utils.decorators.not_implemented_for', 'not_implemented_for', (['"""undirected"""'], {}), "('undirected')\n", (5968, 5982), False, 'from networkx.utils.decorators import not_implemented_for\n'), ((5984, 6025), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.in_degree_centrality'], {}), '(nxa.in_degree_centrality)\n', (5999, 6025), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((6159, 6192), 'networkx.utils.decorators.not_implemented_for', 'not_implemented_for', (['"""undirected"""'], {}), "('undirected')\n", (6178, 6192), False, 'from networkx.utils.decorators import not_implemented_for\n'), ((6194, 6236), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.out_degree_centrality'], {}), '(nxa.out_degree_centrality)\n', (6209, 6236), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((6336, 6369), 'networkx.utils.decorators.not_implemented_for', 'not_implemented_for', (['"""multigraph"""'], {}), "('multigraph')\n", (6355, 6369), False, 'from networkx.utils.decorators import not_implemented_for\n'), ((6371, 6414), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.eigenvector_centrality'], {}), '(nxa.eigenvector_centrality)\n', (6386, 6414), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((7293, 7326), 'networkx.utils.decorators.not_implemented_for', 'not_implemented_for', (['"""multigraph"""'], {}), "('multigraph')\n", (7312, 7326), False, 'from networkx.utils.decorators import not_implemented_for\n'), ((7328, 7364), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.katz_centrality'], {}), '(nxa.katz_centrality)\n', (7343, 7364), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((8732, 8761), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.has_path'], {}), '(nxa.has_path)\n', (8747, 8761), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((8936, 8970), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.shortest_path'], {}), '(nxa.shortest_path)\n', (8951, 8970), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((10140, 10189), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.average_shortest_path_length'], {}), '(nxa.average_shortest_path_length)\n', (10155, 10189), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((11980, 12017), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.bfs_predecessors'], {}), '(nxa.bfs_predecessors)\n', (11995, 12017), False, 'from graphscope.nx.utils.compat import 
patch_docstring\n'), ((12211, 12246), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.bfs_successors'], {}), '(nxa.bfs_successors)\n', (12226, 12246), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((13634, 13675), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.closeness_centrality'], {}), '(nxa.closeness_centrality)\n', (13649, 13675), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((14161, 14190), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.bfs_tree'], {}), '(nxa.bfs_tree)\n', (14176, 14190), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((18892, 18925), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.transitivity'], {}), '(nxa.transitivity)\n', (18907, 18925), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((19064, 19103), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.average_clustering'], {}), '(nxa.average_clustering)\n', (19079, 19103), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((22316, 22350), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.node_boundary'], {}), '(nxa.node_boundary)\n', (22331, 22350), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((22946, 22980), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.edge_boundary'], {}), '(nxa.edge_boundary)\n', (22961, 22980), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((27683, 27718), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['nxa.is_simple_path'], {}), '(nxa.is_simple_path)\n', (27698, 27718), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((822, 843), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (837, 843), False, 'import functools\n'), ((1985, 2006), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (2000, 2006), False, 'import functools\n'), ((4609, 4656), 'graphscope.pagerank_nx', 'graphscope.pagerank_nx', (['G', 'alpha', 'max_iter', 'tol'], {}), '(G, alpha, max_iter, tol)\n', (4631, 4656), False, 'import graphscope\n'), ((5854, 5909), 'graphscope.degree_centrality', 'graphscope.degree_centrality', (['G'], {'centrality_type': '"""both"""'}), "(G, centrality_type='both')\n", (5882, 5909), False, 'import graphscope\n'), ((6066, 6119), 'graphscope.degree_centrality', 'graphscope.degree_centrality', (['G'], {'centrality_type': '"""in"""'}), "(G, centrality_type='in')\n", (6094, 6119), False, 'import graphscope\n'), ((6278, 6332), 'graphscope.degree_centrality', 'graphscope.degree_centrality', (['G'], {'centrality_type': '"""out"""'}), "(G, centrality_type='out')\n", (6306, 6332), False, 'import graphscope\n'), ((14948, 14960), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (14958, 14960), False, 'from graphscope import nx\n'), ((15829, 15852), 'graphscope.k_core', 'graphscope.k_core', (['G', 'k'], {}), '(G, k)\n', (15846, 15852), False, 'import graphscope\n'), ((18272, 18296), 'graphscope.clustering', 'graphscope.clustering', (['G'], {}), '(G)\n', (18293, 18296), False, 'import graphscope\n'), ((18846, 18869), 'graphscope.triangles', 'graphscope.triangles', (['G'], {}), '(G)\n', (18866, 18869), False, 'import graphscope\n'), ((22252, 22312), 'graphscope.degree_assortativity_coefficient', 'graphscope.degree_assortativity_coefficient', (['G', 'x', 'y', 'weight'], {}), '(G, x, y, weight)\n', (22295, 22312), False, 
'import graphscope\n'), ((25521, 25586), 'graphscope.average_degree_connectivity', 'graphscope.average_degree_connectivity', (['G', 'source', 'target', 'weight'], {}), '(G, source, target, weight)\n', (25559, 25586), False, 'import graphscope\n'), ((26620, 26669), 'graphscope.attribute_assortativity_coefficient', 'graphscope.attribute_assortativity_coefficient', (['G'], {}), '(G)\n', (26666, 26669), False, 'import graphscope\n'), ((27632, 27679), 'graphscope.numeric_assortativity_coefficient', 'graphscope.numeric_assortativity_coefficient', (['G'], {}), '(G)\n', (27676, 27679), False, 'import graphscope\n'), ((5022, 5098), 'graphscope.hits', 'graphscope.hits', (['G'], {'tolerance': 'tol', 'max_round': 'max_iter', 'normalized': 'normalized'}), '(G, tolerance=tol, max_round=max_iter, normalized=normalized)\n', (5037, 5098), False, 'import graphscope\n'), ((5388, 5434), 'networkx.algorithms.hits', 'nxa.hits', (['G', 'max_iter', 'tol', 'nstart', 'normalized'], {}), '(G, max_iter, tol, nstart, normalized)\n', (5396, 5434), True, 'import networkx.algorithms as nxa\n'), ((5471, 5515), 'graphscope.nx.PowerIterationFailedConvergence', 'nx.PowerIterationFailedConvergence', (['max_iter'], {}), '(max_iter)\n', (5505, 5515), False, 'from graphscope import nx\n'), ((6771, 6842), 'graphscope.eigenvector_centrality', 'graphscope.eigenvector_centrality', (['G'], {'tolerance': 'tol', 'max_round': 'max_iter'}), '(G, tolerance=tol, max_round=max_iter)\n', (6804, 6842), False, 'import graphscope\n'), ((6935, 6995), 'networkx.algorithms.eigenvector_centrality', 'nxa.eigenvector_centrality', (['G', 'max_iter', 'tol', 'nstart', 'weight'], {}), '(G, max_iter, tol, nstart, weight)\n', (6961, 6995), True, 'import networkx.algorithms as nxa\n'), ((7030, 7105), 'graphscope.nx.NetworkXPointlessConcept', 'nx.NetworkXPointlessConcept', (['"""cannot compute centrality for the null graph"""'], {}), "('cannot compute centrality for the null graph')\n", (7057, 7105), False, 'from graphscope import nx\n'), ((7164, 7208), 'graphscope.nx.PowerIterationFailedConvergence', 'nx.PowerIterationFailedConvergence', (['max_iter'], {}), '(max_iter)\n', (7198, 7208), False, 'from graphscope import nx\n'), ((7874, 7989), 'graphscope.katz_centrality', 'graphscope.katz_centrality', (['G'], {'alpha': 'alpha', 'beta': 'beta', 'tolerance': 'tol', 'max_round': 'max_iter', 'normalized': 'normalized'}), '(G, alpha=alpha, beta=beta, tolerance=tol,\n max_round=max_iter, normalized=normalized)\n', (7900, 7989), False, 'import graphscope\n'), ((8180, 8258), 'networkx.algorithms.katz_centrality', 'nxa.katz_centrality', (['G', 'alpha', 'beta', 'max_iter', 'tol', 'nstart', 'normalized', 'weight'], {}), '(G, alpha, beta, max_iter, tol, nstart, normalized, weight)\n', (8199, 8258), True, 'import networkx.algorithms as nxa\n'), ((8481, 8525), 'graphscope.nx.PowerIterationFailedConvergence', 'nx.PowerIterationFailedConvergence', (['max_iter'], {}), '(max_iter)\n', (8515, 8525), False, 'from graphscope import nx\n'), ((8805, 8854), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp_has_path"""', 'context': '"""tensor"""'}), "(algo='sssp_has_path', context='tensor')\n", (8814, 8854), False, 'from graphscope.framework.app import AppAssets\n'), ((9043, 9088), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp_path"""', 'context': '"""tensor"""'}), "(algo='sssp_path', context='tensor')\n", (9052, 9088), False, 'from graphscope.framework.app import AppAssets\n'), ((10070, 10125), 'graphscope.framework.app.AppAssets', 
'AppAssets', ([], {'algo': '"""sssp_projected"""', 'context': '"""vertex_data"""'}), "(algo='sssp_projected', context='vertex_data')\n", (10079, 10125), False, 'from graphscope.framework.app import AppAssets\n'), ((10346, 10388), 'graphscope.average_shortest_path_length', 'graphscope.average_shortest_path_length', (['G'], {}), '(G)\n', (10385, 10388), False, 'import graphscope\n'), ((10432, 10483), 'networkx.algorithms.average_shortest_path_length', 'nxa.average_shortest_path_length', (['G', 'weight', 'method'], {}), '(G, weight, method)\n', (10464, 10483), True, 'import networkx.algorithms as nxa\n'), ((10773, 10805), 'graphscope.nx.NetworkXPointlessConcept', 'nx.NetworkXPointlessConcept', (['msg'], {}), '(msg)\n', (10800, 10805), False, 'from graphscope import nx\n'), ((11810, 11857), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""bfs_generic"""', 'context': '"""tensor"""'}), "(algo='bfs_generic', context='tensor')\n", (11819, 11857), False, 'from graphscope.framework.app import AppAssets\n'), ((12080, 12127), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""bfs_generic"""', 'context': '"""tensor"""'}), "(algo='bfs_generic', context='tensor')\n", (12089, 12127), False, 'from graphscope.framework.app import AppAssets\n'), ((12307, 12354), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""bfs_generic"""', 'context': '"""tensor"""'}), "(algo='bfs_generic', context='tensor')\n", (12316, 12354), False, 'from graphscope.framework.app import AppAssets\n'), ((13556, 13627), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""all_pairs_shortest_path_length"""', 'context': '"""vertex_data"""'}), "(algo='all_pairs_shortest_path_length', context='vertex_data')\n", (13565, 13627), False, 'from graphscope.framework.app import AppAssets\n'), ((14026, 14079), 'networkx.algorithms.closeness_centrality', 'nxa.closeness_centrality', (['G', 'u', 'distance', 'wf_improved'], {}), '(G, u, distance, wf_improved)\n', (14050, 14079), True, 'import networkx.algorithms as nxa\n'), ((18990, 19038), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""transitivity"""', 'context': '"""tensor"""'}), "(algo='transitivity', context='tensor')\n", (18999, 19038), False, 'from graphscope.framework.app import AppAssets\n'), ((20317, 20367), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""avg_clustering"""', 'context': '"""tensor"""'}), "(algo='avg_clustering', context='tensor')\n", (20326, 20367), False, 'from graphscope.framework.app import AppAssets\n'), ((20795, 20849), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""wcc_projected"""', 'context': '"""vertex_data"""'}), "(algo='wcc_projected', context='vertex_data')\n", (20804, 20849), False, 'from graphscope.framework.app import AppAssets\n'), ((22857, 22895), 'networkx.algorithms.node_boundary', 'nxa.node_boundary', (['G', 'nbunch1', 'nbunch2'], {}), '(G, nbunch1, nbunch2)\n', (22874, 22895), True, 'import networkx.algorithms as nxa\n'), ((23552, 23611), 'networkx.algorithms.edge_boundary', 'nxa.edge_boundary', (['G', 'nbunch1', 'nbunch2', 'data', 'keys', 'default'], {}), '(G, nbunch1, nbunch2, data, keys, default)\n', (23569, 23611), True, 'import networkx.algorithms as nxa\n'), ((27822, 27857), 'graphscope.is_simple_path', 'graphscope.is_simple_path', (['G', 'nodes'], {}), '(G, nodes)\n', (27847, 27857), False, 'import graphscope\n'), ((27947, 27975), 'networkx.algorithms.is_simple_path', 'nxa.is_simple_path', (['G', 'nodes'], {}), 
'(G, nodes)\n', (27965, 27975), True, 'import networkx.algorithms as nxa\n'), ((28180, 28204), 'json.dumps', 'json.dumps', (['target_nodes'], {}), '(target_nodes)\n', (28190, 28204), False, 'import json\n'), ((35607, 35676), 'networkx.algorithms.betweenness_centrality', 'nxa.betweenness_centrality', (['G', 'k', 'normalized', 'weight', 'endpoints', 'seed'], {}), '(G, k, normalized, weight, endpoints, seed)\n', (35633, 35676), True, 'import networkx.algorithms as nxa\n'), ((965, 1034), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Missing graph_type attribute in graph object."""'], {}), "('Missing graph_type attribute in graph object.')\n", (985, 1034), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((13870, 13931), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""closeness_centrality"""', 'context': '"""vertex_data"""'}), "(algo='closeness_centrality', context='vertex_data')\n", (13879, 13931), False, 'from graphscope.framework.app import AppAssets\n'), ((22643, 22692), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""node_boundary"""', 'context': '"""tensor"""'}), "(algo='node_boundary', context='tensor')\n", (22652, 22692), False, 'from graphscope.framework.app import AppAssets\n'), ((23294, 23343), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""edge_boundary"""', 'context': '"""tensor"""'}), "(algo='edge_boundary', context='tensor')\n", (23303, 23343), False, 'from graphscope.framework.app import AppAssets\n'), ((28220, 28272), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""all_simple_paths"""', 'context': '"""tensor"""'}), "(algo='all_simple_paths', context='tensor')\n", (28229, 28272), False, 'from graphscope.framework.app import AppAssets\n'), ((35437, 35485), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': 'algorithm', 'context': '"""vertex_data"""'}), "(algo=algorithm, context='vertex_data')\n", (35446, 35485), False, 'from graphscope.framework.app import AppAssets\n'), ((1212, 1240), 'inspect.getfullargspec', 'inspect.getfullargspec', (['func'], {}), '(func)\n', (1234, 1240), False, 'import inspect\n'), ((1656, 1684), 'inspect.getfullargspec', 'inspect.getfullargspec', (['func'], {}), '(func)\n', (1678, 1684), False, 'import inspect\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pytest from networkx.classes.tests.test_digraph import BaseAttrDiGraphTester from networkx.testing import assert_nodes_equal from graphscope import nx from graphscope.nx.tests.classes.test_graph import TestEdgeSubgraph from graphscope.nx.tests.classes.test_graph import TestGraph @pytest.mark.usefixtures("graphscope_session") class TestDiGraph(BaseAttrDiGraphTester, TestGraph): def setup_method(self): self.Graph = nx.DiGraph # build K3 k3edges = [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] self.k3edges = [(0, 1), (0, 2), (1, 2)] self.k3nodes = [0, 1, 2] self.K3 = self.Graph() self.K3.update(k3edges, self.k3nodes) self.P3 = self.Graph() self.P3nodes = [0, 1, 2] self.P3edges = [(0, 1), (1, 2)] self.P3.update(self.P3edges, self.P3nodes) def test_to_undirected_reciprocal(self): pass def test_data_input(self): G = self.Graph({1: [2], 2: [1]}, name="test") assert G.name == "test" assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] assert sorted(G.succ.items()) == [(1, {2: {}}), (2, {1: {}})] assert sorted(G.pred.items()) == [(1, {2: {}}), (2, {1: {}})] def test_add_edge(self): G = self.Graph() G.add_edge(0, 1) assert G.adj == {0: {1: {}}, 1: {}} assert G.succ == {0: {1: {}}, 1: {}} assert G.pred == {0: {}, 1: {0: {}}} G = self.Graph() G.add_edge(*(0, 1)) assert G.adj == {0: {1: {}}, 1: {}} assert G.succ == {0: {1: {}}, 1: {}} assert G.pred == {0: {}, 1: {0: {}}} def test_add_edges_from(self): G = self.Graph() G.add_edges_from([(0, 1), (0, 2, {"data": 3})], data=2) assert G.adj == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} assert G.succ == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} assert G.pred == {0: {}, 1: {0: {"data": 2}}, 2: {0: {"data": 3}}} with pytest.raises(nx.NetworkXError): G.add_edges_from([(0,)]) # too few in tuple with pytest.raises(nx.NetworkXError): G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple with pytest.raises(TypeError): G.add_edges_from([0]) # not a tuple def test_remove_edge(self): G = self.K3 G.remove_edge(0, 1) assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}} assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} with pytest.raises(nx.NetworkXError): G.remove_edge(-1, 0) def test_remove_edges_from(self): G = self.K3 G.remove_edges_from([(0, 1)]) assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}} assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} G.remove_edges_from([(0, 0)]) # silent fail # replace the nx def test_out_edges_data(self): G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) assert sorted(G.out_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] assert sorted(G.out_edges(0, data=True)) == [(0, 1, {"data": 0})] assert sorted(G.out_edges(data="data")) == [(0, 1, 0), (1, 0, None)] assert sorted(G.out_edges(0, data="data")) == [(0, 1, 0)] # replace the nx def test_in_edges_data(self): G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) assert sorted(G.in_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] assert sorted(G.in_edges(1, data=True)) == [(0, 1, {"data": 0})] assert sorted(G.in_edges(data="data")) == [(0, 1, 0), (1, 0, None)] assert sorted(G.in_edges(1, 
data="data")) == [(0, 1, 0)] # replace the nx def test_reverse_copy(self): G = nx.DiGraph([(0, 1), (1, 2)]) R = G.reverse() assert sorted(R.edges()) == [(1, 0), (2, 1)] R.remove_edge(1, 0) assert sorted(R.edges()) == [(2, 1)] assert sorted(G.edges()) == [(0, 1), (1, 2)] # replace the nx def test_reverse_nocopy(self): G = nx.DiGraph([(0, 1), (1, 2)]) R = G.reverse(copy=False) assert sorted(R.edges()) == [(1, 0), (2, 1)] with pytest.raises(nx.NetworkXError): R.remove_edge(1, 0) # original test use function object as node, here we change to tuple. def test_reverse_hashable(self): x = (1, 2) y = (2, 3) G = nx.DiGraph() G.add_edge(x, y) assert_nodes_equal(G.nodes(), G.reverse().nodes()) assert [(y, x)] == list(G.reverse().edges()) @pytest.mark.usefixtures("graphscope_session") class TestEdgeSubgraph(TestEdgeSubgraph): def setup_method(self): # Create a doubly-linked path graph on five nodes. # G = nx.DiGraph(nx.path_graph(5)) G = nx.path_graph(5, nx.DiGraph) # Add some node, edge, and graph attributes. for i in range(5): G.nodes[i]["name"] = f"node{i}" G.edges[0, 1]["name"] = "edge01" G.edges[3, 4]["name"] = "edge34" G.graph["name"] = "graph" # Get the subgraph induced by the first and last edges. self.G = G self.H = G.edge_subgraph([(0, 1), (3, 4)]) def test_correct_edges(self): """Tests that the subgraph has the correct edges.""" assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name")) def test_pred_succ(self): """Test that nodes are added to predecessors and successors. For more information, see GitHub issue #2370. """ G = nx.DiGraph() G.add_edge(0, 1) H = G.edge_subgraph([(0, 1)]) assert list(H.predecessors(0)) == [] assert list(H.successors(0)) == [1] assert list(H.predecessors(1)) == [0] assert list(H.successors(1)) == []
[ "pytest.raises", "pytest.mark.usefixtures", "graphscope.nx.DiGraph", "graphscope.nx.path_graph" ]
[((691, 736), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (714, 736), False, 'import pytest\n'), ((5010, 5055), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (5033, 5055), False, 'import pytest\n'), ((3377, 3422), 'graphscope.nx.DiGraph', 'nx.DiGraph', (["[(0, 1, {'data': 0}), (1, 0, {})]"], {}), "([(0, 1, {'data': 0}), (1, 0, {})])\n", (3387, 3422), False, 'from graphscope import nx\n'), ((3791, 3836), 'graphscope.nx.DiGraph', 'nx.DiGraph', (["[(0, 1, {'data': 0}), (1, 0, {})]"], {}), "([(0, 1, {'data': 0}), (1, 0, {})])\n", (3801, 3836), False, 'from graphscope import nx\n'), ((4200, 4228), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['[(0, 1), (1, 2)]'], {}), '([(0, 1), (1, 2)])\n', (4210, 4228), False, 'from graphscope import nx\n'), ((4501, 4529), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['[(0, 1), (1, 2)]'], {}), '([(0, 1), (1, 2)])\n', (4511, 4529), False, 'from graphscope import nx\n'), ((4857, 4869), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (4867, 4869), False, 'from graphscope import nx\n'), ((5240, 5268), 'graphscope.nx.path_graph', 'nx.path_graph', (['(5)', 'nx.DiGraph'], {}), '(5, nx.DiGraph)\n', (5253, 5268), False, 'from graphscope import nx\n'), ((6008, 6020), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (6018, 6020), False, 'from graphscope import nx\n'), ((2404, 2435), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError'], {}), '(nx.NetworkXError)\n', (2417, 2435), False, 'import pytest\n'), ((2507, 2538), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError'], {}), '(nx.NetworkXError)\n', (2520, 2538), False, 'import pytest\n'), ((2619, 2643), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2632, 2643), False, 'import pytest\n'), ((2940, 2971), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError'], {}), '(nx.NetworkXError)\n', (2953, 2971), False, 'import pytest\n'), ((4630, 4661), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError'], {}), '(nx.NetworkXError)\n', (4643, 4661), False, 'import pytest\n')]
import networkx.algorithms.bipartite.tests.test_matching
import pytest

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(networkx.algorithms.bipartite.tests.test_matching,
                        decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.bipartite.tests.test_matching import TestMatching


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestMatching)
class TestMatching():
    # NB: graphscope.nx does not support tuple nodes, so we remove the
    # disconnected_graph from setup
    def setup(self):
        """Creates a bipartite graph for use in testing matching algorithms.

        The bipartite graph has a maximum cardinality matching that leaves
        vertex 1 and vertex 10 unmatched. The first six numbers are the left
        vertices and the next six numbers are the right vertices.
        """
        self.simple_graph = nx.complete_bipartite_graph(2, 3)
        self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1}

        edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)]
        self.top_nodes = set(range(6))
        self.graph = nx.Graph()
        self.graph.add_nodes_from(range(12))
        self.graph.add_edges_from(edges)

    @pytest.mark.skip(reason="graphscope.nx does not support tuple node")
    def test_eppstein_matching_disconnected(self):
        with pytest.raises(nx.AmbiguousSolution):
            match = eppstein_matching(self.disconnected_graph)

    @pytest.mark.skip(reason="graphscope.nx does not support tuple node")
    def test_hopcroft_karp_matching_disconnected(self):
        with pytest.raises(nx.AmbiguousSolution):
            match = hopcroft_karp_matching(self.disconnected_graph)

    @pytest.mark.skip(reason="graphscope.nx does not support tuple node")
    def test_issue_2127(self):
        pass

    @pytest.mark.skip(reason="graphscope.nx does not support object node")
    def test_unorderable_nodes(self):
        pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.raises", "pytest.mark.usefixtures" ]
[((438, 483), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (461, 483), False, 'import pytest\n'), ((485, 525), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestMatching'], {}), '(TestMatching)\n', (511, 525), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1344, 1412), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx does not support tuple node"""'}), "(reason='graphscope.nx does not support tuple node')\n", (1360, 1412), False, 'import pytest\n'), ((1583, 1651), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx does not support tuple node"""'}), "(reason='graphscope.nx does not support tuple node')\n", (1599, 1651), False, 'import pytest\n'), ((1832, 1900), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx does not support tuple node"""'}), "(reason='graphscope.nx does not support tuple node')\n", (1848, 1900), False, 'import pytest\n'), ((1951, 2020), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx does not support object node"""'}), "(reason='graphscope.nx does not support object node')\n", (1967, 2020), False, 'import pytest\n'), ((312, 357), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (335, 357), False, 'import pytest\n'), ((1477, 1512), 'pytest.raises', 'pytest.raises', (['nx.AmbiguousSolution'], {}), '(nx.AmbiguousSolution)\n', (1490, 1512), False, 'import pytest\n'), ((1721, 1756), 'pytest.raises', 'pytest.raises', (['nx.AmbiguousSolution'], {}), '(nx.AmbiguousSolution)\n', (1734, 1756), False, 'import pytest\n')]
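The record above reuses the upstream NetworkX test class through import_as_graphscope_nx and with_graphscope_nx_context. A hypothetical, simplified version of the same reuse pattern — inherit the upstream class and skip only the unsupported cases — is sketched below with plain pytest inheritance; it is not GraphScope's actual decorator machinery, and it assumes the upstream test module is importable, as the record itself does.

import pytest
from networkx.algorithms.bipartite.tests.test_matching import TestMatching as UpstreamTestMatching


class TestMatchingSubset(UpstreamTestMatching):
    # inherit every upstream case, then disable the ones the backend cannot run
    @pytest.mark.skip(reason="tuple nodes are unsupported in this sketch")
    def test_eppstein_matching_disconnected(self):
        pass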
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Classes and functions used to manage dags. """ import queue from graphscope.framework.operation import Operation from graphscope.proto import op_def_pb2 class Dag(object): """Class represented as a GraphScope dataflow dag. A :class:`Dag` is always belongs to a session and containes a set of :class:`Operation` object, which performs computations on tensors. """ def __init__(self): # the order in which op joins the dag, starting by 1. self._seq = 1 # mapping from op's key to op self._ops_by_key = dict() self._ops_seq_by_key = dict() def __str__(self): return str(self.as_dag_def()) def __repr__(self): return self.__str__() def exists(self, op): if not isinstance(op, Operation): raise TypeError("op must be an Operation: {0}".format(op)) return op.key in self._ops_by_key def add_op(self, op): if not isinstance(op, Operation): raise TypeError("op must be an Operation: {0}".format(op)) if not op.evaluated and op.key in self._ops_by_key: raise ValueError("op named {0} already exist in dag".format(op.key)) self._ops_by_key[op.key] = op self._ops_seq_by_key[op.key] = self._seq self._seq += 1 def as_dag_def(self): """Return :class:`Dag` as a :class:`DagDef` proto buffer.""" dag_def = op_def_pb2.DagDef() for _, op in self._ops_by_key.items(): dag_def.op.extend([op.as_op_def()]) return dag_def def to_json(self): return dict({k: op.to_json() for k, op in self._ops_by_key}) def extract_subdag_for(self, ops): """Extract all nodes included the path that can reach the target ops.""" out = op_def_pb2.DagDef() # leaf op handle # there are two kinds of leaf op: # 1) unload graph / app # 2) networkx releated op if len(ops) == 1 and ops[0].is_leaf_op(): out.op.extend([ops[0].as_op_def()]) return out op_keys = list() # assert op is not present in current dag for op in ops: assert op.key in self._ops_by_key, "%s is not in the dag" % op.key assert not self._ops_by_key[op.key].evaluated, "%is is evaluated" % op.key op_keys.append(op.key) op_keys_to_keep = self._bfs_for_reachable_ops(op_keys) op_keys_to_keep = sorted(op_keys_to_keep, key=lambda n: self._ops_seq_by_key[n]) for key in op_keys_to_keep: op_def = self._ops_by_key[key].as_op_def() out.op.extend([op_def]) return out def clear(self): self._ops_by_key.clear() self._ops_seq_by_key.clear() self._seq = 1 def _bfs_for_reachable_ops(self, op_keys): """Breadth first search for reachable ops from target ops. 
Why we need bfs: We need to build a dependency order of ops in a DAG Why we need record a sequence number: We need to ensure the dependency order is correct when: - an op is depended by multiple ops - an op occurs multiple times in target_keys """ op_keys_to_keep = set() next_to_visit = queue.Queue() for key in op_keys: next_to_visit.put(key) while not next_to_visit.empty(): next_op = next_to_visit.get() if next_op in op_keys_to_keep: continue op_keys_to_keep.add(next_op) for parent_op in self._ops_by_key[next_op].parents: if not parent_op.evaluated: parent_key = parent_op.key next_to_visit.put(parent_key) return list(op_keys_to_keep) class DAGNode(object): """Base class to own :class:`Operation` information which as a node in a DAG.""" def __init__(self): self._op = None self._session = None @property def op(self): if self._op is None: raise ValueError("None value of op in dag node.") if not isinstance(self._op, Operation): raise ValueError("Type of op in dag node must be Operation") return self._op @op.setter def op(self, value): self._op = value @property def evaluated(self): return self._op.evaluated @evaluated.setter def evaluated(self, value): self._op.evaluated = bool(value) @property def session(self): """Get the session that the dag node belongs to.""" assert self._session is not None return self._session @session.setter def session(self, value): self._session = value @property def session_id(self): """Get the session id that the dag node belongs to.""" assert self._session is not None return self._session.session_id
[ "queue.Queue", "graphscope.proto.op_def_pb2.DagDef" ]
[((2080, 2099), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (2097, 2099), False, 'from graphscope.proto import op_def_pb2\n'), ((2446, 2465), 'graphscope.proto.op_def_pb2.DagDef', 'op_def_pb2.DagDef', ([], {}), '()\n', (2463, 2465), False, 'from graphscope.proto import op_def_pb2\n'), ((3935, 3948), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (3946, 3948), False, 'import queue\n')]
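The Dag.extract_subdag_for method above relies on a breadth-first walk over each op's parents and then replays the kept ops in their original insertion order. A self-contained sketch of that dependency-closure step, using only the standard library and hypothetical names:

import queue

def reachable_keys(target_keys, parents_by_key):
    """parents_by_key maps an op key to the keys it directly depends on."""
    keep = set()
    pending = queue.Queue()
    for key in target_keys:
        pending.put(key)
    while not pending.empty():
        key = pending.get()
        if key in keep:
            continue
        keep.add(key)
        for parent in parents_by_key.get(key, ()):
            pending.put(parent)
    return keep

# toy dependency chain: c depends on b, b depends on a
deps = {"c": ["b"], "b": ["a"], "a": []}
assert reachable_keys(["c"], deps) == {"a", "b", "c"}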
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import concurrent.futures import io from functools import lru_cache import msgpack import simdjson from graphscope.framework import dag_utils from graphscope.nx.utils.misc import clear_mutation_cache from graphscope.proto import graph_def_pb2 from graphscope.proto import types_pb2 __all__ = ["Cache"] class Cache: """A adhoc cache for graphscope.nx Graph. The Cache is consists of two kind of cache: the iteration batch cache for __iter__ and the LRU cache for cache miss. """ def __init__(self, graph): self._graph = graph # the iteration caches for graph data self.node_id_cache = () self.node_attr_cache = () self.succ_cache = () self.succ_attr_cache = () self.pred_cache = () self.pred_attr_cache = () # status for iteration batch cache self._len = 0 self.id2i = {} self.enable_iter_cache = False self.iter_gid = 0 self.iter_pre_gid = 0 self.node_attr_align = False self.succ_align = False self.succ_attr_align = False self.pred_align = False self.pred_attr_align = False # thread pool and promises for iteration batch cache fetch self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) self.futures = { "node_id": None, "node_attr": None, "succ": None, "succ_attr": None, "pred": None, "pred_attr": None, } def warmup(self): """Warm up the iteration cache.""" self._len = self._graph.number_of_nodes() if self._len > 1000: # avoid much small graphs to compete thread resource self.enable_iter_cache = True self._async_fetch_node_id_cache(0) self._async_fetch_succ_cache(0) self._async_fetch_node_attr_cache(0) self._async_fetch_succ_attr_cache(0) # LRU Caches @lru_cache(1000000) def get_node_attr(self, n): return get_node_data(self._graph, n) @lru_cache(1000000) def get_successors(self, n): return get_neighbors(self._graph, n) @lru_cache(1000000) def get_succ_attr(self, n): return get_neighbors_attr(self._graph, n) @lru_cache(1000000) def get_predecessors(self, n): return get_neighbors(self._graph, n, pred=True) @lru_cache(1000000) def get_pred_attr(self, n): return get_neighbors_attr(self._graph, n, pred=True) def align_node_attr_cache(self): """Check and align the node attr cache with node id cache""" if self.enable_iter_cache and self.node_attr_align is False: f = self.futures["node_attr"] if f is not None: start_gid, self.node_attr_cache = f.result() if start_gid == self.iter_pre_gid: # align to current node_id_cache if self.iter_gid != self.iter_pre_gid: self._async_fetch_node_attr_cache(self.iter_gid) self.node_attr_align = True else: # not align to current node_id_cache, should fetch again self._async_fetch_node_attr_cache(self.iter_pre_gid) return self.node_attr_align def align_succ_cache(self): """Check and align the succ neighbor cache with node id cache""" if self.enable_iter_cache and self.succ_align is False: f = self.futures["succ"] start_gid, self.succ_cache = f.result() if start_gid == self.iter_pre_gid: if self.iter_gid != 
self.iter_pre_gid: self._async_fetch_succ_cache(self.iter_gid) self.succ_align = True else: self._async_fetch_succ_cache(self.iter_pre_gid) return self.succ_align def align_succ_attr_cache(self): """Check and align the succ neighbor attr cache with node id cache""" if self.enable_iter_cache and self.succ_attr_align is False: f = self.futures["succ_attr"] if f is not None: start_gid, self.succ_attr_cache = f.result() if start_gid == self.iter_pre_gid: if self.iter_gid != self.iter_pre_gid: self._async_fetch_succ_attr_cache(self.iter_gid) self.succ_attr_align = True else: self._async_fetch_succ_attr_cache(self.iter_pre_gid) return self.succ_attr_align def align_pred_cache(self): """Check and align the pred neighbor cache with node id cache""" if self.enable_iter_cache and self.pred_align is False: if self.futures["pred"] is None: self._async_fetch_pred_cache(self.iter_pre_gid) f = self.futures["pred"] start_gid, self.pred_cache = f.result() if start_gid == self.iter_pre_gid: if self.iter_gid != self.iter_pre_gid: self._async_fetch_pred_cache(self.iter_gid) self.pred_align = True else: print("pred not align", start_gid, self.iter_pre_gid) self._async_fetch_pred_cache(self.iter_pre_gid) return self.pred_align def align_pred_attr_cache(self): """Check and align the pred neighbor attr cache with node id cache""" if self.enable_iter_cache and self.pred_attr_align is False: if self.futures["pred_attr"] is None: self._async_fetch_pred_attr_cache(self.iter_pre_gid) f = self.futures["pred_attr"] start_gid, self.pred_attr_cache = f.result() if start_gid == self.iter_pre_gid: if self.iter_gid != self.iter_pre_gid: self._async_fetch_pred_attr_cache(self.iter_gid) self.pred_attr_align = True else: self._async_fetch_pred_attr_cache(self.iter_pre_gid) return self.pred_attr_align def align_neighbor_cache(self, pred=False): return self.align_pred_cache() if pred else self.align_succ_cache() def align_neighbor_attr_cache(self, pred=True): return self.align_pred_attr_cache() if pred else self.align_succ_attr_cache() @clear_mutation_cache def __contains__(self, key): if self.enable_iter_cache: if len(self.node_id_cache) == 0 and self.futures["node_id"] is not None: self.iter_pre_gid = self.iter_gid self.iter_gid, node_size, self.node_id_cache = self.futures[ "node_id" ].result() self.futures["node_id"] = None if self.iter_gid != self.iter_pre_gid: self._async_fetch_node_id_cache(self.iter_gid) if not self.id2i and self.node_id_cache: # initialize the id to index hash map self.id2i = {k: v for v, k in enumerate(self.node_id_cache)} return key in self.id2i @clear_mutation_cache def __len__(self): return self._len @clear_mutation_cache def __iter__(self): iter_n = 0 while True: if iter_n >= self._len: break if iter_n == 0 and len(self.node_id_cache) > 0: iter_n += len(self.node_id_cache) else: self.iter_pre_gid = self.iter_gid if self.enable_iter_cache: self.iter_gid, node_size, self.node_id_cache = self.futures[ "node_id" ].result() if self.iter_gid != self.iter_pre_gid: self._async_fetch_node_id_cache(self.iter_gid) else: ( self.iter_gid, node_size, self.node_id_cache, ) = self._get_node_id_cache(self.iter_gid) iter_n += node_size self.id2i.clear() self.node_attr_align = False self.succ_align = False self.succ_attr_align = False self.pred_align = False self.pred_attr_align = False yield from self.node_id_cache def shutdown(self): for _, future in self.futures.items(): if future is not None: future.cancel() for _, future in self.futures.items(): if future is not None: try: future.result() except 
concurrent.futures.CancelledError: pass future = None def clear(self): """Clear batch cache and lru cache, reset the status and warmup again""" if self.enable_iter_cache: self.shutdown() self.enable_iter_cache = False self.iter_gid = 0 self.iter_pre_gid = 0 self.id2i.clear() self.node_id_cache = () self.node_attr_cache = () self.succ_cache = () self.succ_attr_cache = () self.pred_cache = () self.pred_attr_cache = () self.node_attr_align = ( self.succ_align ) = self.succ_attr_align = self.pred_align = self.pred_attr_align = False self.get_node_attr.cache_clear() self.get_successors.cache_clear() self.get_succ_attr.cache_clear() self.get_predecessors.cache_clear() self.get_pred_attr.cache_clear() self.warmup() def clear_node_attr_cache(self): """Clear the node attr cache""" if self.futures["node_attr"] is not None: self.futures["node_attr"].cancel() if self.futures["node_attr"] is not None: try: self.futures["node_attr"].result() except concurrent.futures.CancelledError: pass self.futures["node_attr"] = None self.node_attr_cache = () self.get_node_attr.cache_clear() self.node_attr_align = False def clear_neighbor_attr_cache(self): """Clear the neighbor attr cache""" if self.futures["succ_attr"] is not None: self.futures["succ_attr"].cancel() if self.futures["pred_attr"] is not None: self.futures["pred_attr"].cancel() if self.futures["succ_attr"] is not None: try: self.futures["succ_attr"].result() except concurrent.futures.CancelledError: pass if self.futures["pred_attr"] is not None: try: self.futures["pred_attr"].result() except concurrent.futures.CancelledError: pass self.futures["succ_attr"] = None self.futures["pred_attr"] = None self.succ_attr_cache = () self.pred_attr_cache = () self.get_succ_attr.cache_clear() self.get_pred_attr.cache_clear() self.succ_attr_align = False self.pred_attr_align = False def _async_fetch_node_id_cache(self, gid): self.futures["node_id"] = self.executor.submit(self._get_node_id_cache, gid) def _async_fetch_node_attr_cache(self, gid): self.futures["node_attr"] = self.executor.submit(self._get_node_attr_cache, gid) def _async_fetch_succ_cache(self, gid): self.futures["succ"] = self.executor.submit(self._get_succ_cache, gid) def _async_fetch_pred_cache(self, gid): self.futures["pred"] = self.executor.submit(self._get_pred_cache, gid) def _async_fetch_succ_attr_cache(self, gid): self.futures["succ_attr"] = self.executor.submit(self._get_succ_attr_cache, gid) def _async_fetch_pred_attr_cache(self, gid): self.futures["pred_attr"] = self.executor.submit(self._get_pred_attr_cache, gid) def _get_node_id_cache(self, gid): op = dag_utils.report_graph( self._graph, types_pb2.NODE_ID_CACHE_BY_GID, gid=gid ) archive = op.eval() gid = archive.get_uint64() node_size = archive.get_uint32() fp = io.BytesIO(archive.get_bytes()) node_array = msgpack.load(fp, use_list=False) return gid, node_size, node_array def _get_node_attr_cache(self, gid): op = dag_utils.report_graph( self._graph, types_pb2.NODE_ATTR_CACHE_BY_GID, gid=gid ) archive = op.eval() gid = archive.get_uint64() fp = io.BytesIO(archive.get_bytes()) node_attr_cache = msgpack.load(fp, use_list=False) return gid, node_attr_cache def _get_succ_cache(self, gid): op = dag_utils.report_graph(self._graph, types_pb2.SUCC_BY_GID, gid=gid) archive = op.eval() gid = archive.get_uint64() fp = io.BytesIO(archive.get_bytes()) succ_cache = msgpack.load(fp, use_list=False) return gid, succ_cache def _get_pred_cache(self, gid): op = dag_utils.report_graph(self._graph, types_pb2.PRED_BY_GID, gid=gid) archive = op.eval() gid = 
archive.get_uint64() fp = io.BytesIO(archive.get_bytes()) pred_cache = msgpack.load(fp, use_list=False) return gid, pred_cache def _get_succ_attr_cache(self, gid): op = dag_utils.report_graph(self._graph, types_pb2.SUCC_ATTR_BY_GID, gid=gid) archive = op.eval() gid = archive.get_uint64() fp = io.BytesIO(archive.get_bytes()) succ_attr_cache = msgpack.load(fp, use_list=False) return gid, succ_attr_cache def _get_pred_attr_cache(self, gid): op = dag_utils.report_graph(self._graph, types_pb2.PRED_ATTR_BY_GID, gid=gid) archive = op.eval() gid = archive.get_uint64() fp = io.BytesIO(archive.get_bytes()) pred_attr_cache = msgpack.load(fp, use_list=False) return gid, pred_attr_cache def get_neighbors(graph, n, pred=False): """Get the neighbors of node in graph. Parameters ---------- graph: the graph to query. n: node the node to get neighbors. report_type: the report type of report graph operation, types_pb2.SUCCS_BY_NODE: get the successors of node, types_pb2.PREDS_BY_NODE: get the predecessors of node, """ if graph.graph_type == graph_def_pb2.ARROW_PROPERTY: n = graph._convert_to_label_id_tuple(n) report_t = types_pb2.PREDS_BY_NODE if pred else types_pb2.SUCCS_BY_NODE op = dag_utils.report_graph(graph, report_t, node=simdjson.dumps(n).encode("utf-8")) archive = op.eval() return msgpack.unpackb(archive.get_bytes(), use_list=False) def get_neighbors_attr(graph, n, pred=False): """Get the neighbors attr of node in graph. Parameters ---------- graph: the graph to query. n: node the node to get neighbors. report_type: the report type of report graph operation, types_pb2.SUCC_ATTR_BY_NODE: get the successors attr of node, types_pb2.PRED_ATTR_BY_NODE: get the predecessors attr of node, Returns ------- attr: tuple """ if graph.graph_type == graph_def_pb2.ARROW_PROPERTY: n = graph._convert_to_label_id_tuple(n) report_t = types_pb2.PRED_ATTR_BY_NODE if pred else types_pb2.SUCC_ATTR_BY_NODE op = dag_utils.report_graph(graph, report_t, node=simdjson.dumps(n).encode("utf-8")) archive = op.eval() return simdjson.loads(archive.get_bytes()) def get_node_data(graph, n): """Returns the attribute dictionary of node n. This is identical to `G[n]`. Parameters ---------- n : nodes Returns ------- node_dict : dictionary The node attribute dictionary. Examples -------- >>> G = nx.path_graph(4) # or DiGraph etc >>> G[0] {} Warning: Assigning to `G[n]` is not permitted. But it is safe to assign attributes `G[n]['foo']` >>> G[0]['weight'] = 7 >>> G[0]['weight'] 7 >>> G = nx.path_graph(4) # or DiGraph etc >>> G.get_node_data(0, 1) {} """ if graph.graph_type == graph_def_pb2.ARROW_PROPERTY: n = graph._convert_to_label_id_tuple(n) op = dag_utils.report_graph( graph, types_pb2.NODE_DATA, node=simdjson.dumps(n).encode("utf-8") ) archive = op.eval() return msgpack.loads(archive.get_bytes(), use_list=False)
[ "simdjson.dumps", "functools.lru_cache", "graphscope.framework.dag_utils.report_graph", "msgpack.load" ]
[((2642, 2660), 'functools.lru_cache', 'lru_cache', (['(1000000)'], {}), '(1000000)\n', (2651, 2660), False, 'from functools import lru_cache\n'), ((2744, 2762), 'functools.lru_cache', 'lru_cache', (['(1000000)'], {}), '(1000000)\n', (2753, 2762), False, 'from functools import lru_cache\n'), ((2847, 2865), 'functools.lru_cache', 'lru_cache', (['(1000000)'], {}), '(1000000)\n', (2856, 2865), False, 'from functools import lru_cache\n'), ((2954, 2972), 'functools.lru_cache', 'lru_cache', (['(1000000)'], {}), '(1000000)\n', (2963, 2972), False, 'from functools import lru_cache\n'), ((3070, 3088), 'functools.lru_cache', 'lru_cache', (['(1000000)'], {}), '(1000000)\n', (3079, 3088), False, 'from functools import lru_cache\n'), ((12616, 12692), 'graphscope.framework.dag_utils.report_graph', 'dag_utils.report_graph', (['self._graph', 'types_pb2.NODE_ID_CACHE_BY_GID'], {'gid': 'gid'}), '(self._graph, types_pb2.NODE_ID_CACHE_BY_GID, gid=gid)\n', (12638, 12692), False, 'from graphscope.framework import dag_utils\n'), ((12885, 12917), 'msgpack.load', 'msgpack.load', (['fp'], {'use_list': '(False)'}), '(fp, use_list=False)\n', (12897, 12917), False, 'import msgpack\n'), ((13015, 13093), 'graphscope.framework.dag_utils.report_graph', 'dag_utils.report_graph', (['self._graph', 'types_pb2.NODE_ATTR_CACHE_BY_GID'], {'gid': 'gid'}), '(self._graph, types_pb2.NODE_ATTR_CACHE_BY_GID, gid=gid)\n', (13037, 13093), False, 'from graphscope.framework import dag_utils\n'), ((13250, 13282), 'msgpack.load', 'msgpack.load', (['fp'], {'use_list': '(False)'}), '(fp, use_list=False)\n', (13262, 13282), False, 'import msgpack\n'), ((13369, 13436), 'graphscope.framework.dag_utils.report_graph', 'dag_utils.report_graph', (['self._graph', 'types_pb2.SUCC_BY_GID'], {'gid': 'gid'}), '(self._graph, types_pb2.SUCC_BY_GID, gid=gid)\n', (13391, 13436), False, 'from graphscope.framework import dag_utils\n'), ((13566, 13598), 'msgpack.load', 'msgpack.load', (['fp'], {'use_list': '(False)'}), '(fp, use_list=False)\n', (13578, 13598), False, 'import msgpack\n'), ((13680, 13747), 'graphscope.framework.dag_utils.report_graph', 'dag_utils.report_graph', (['self._graph', 'types_pb2.PRED_BY_GID'], {'gid': 'gid'}), '(self._graph, types_pb2.PRED_BY_GID, gid=gid)\n', (13702, 13747), False, 'from graphscope.framework import dag_utils\n'), ((13877, 13909), 'msgpack.load', 'msgpack.load', (['fp'], {'use_list': '(False)'}), '(fp, use_list=False)\n', (13889, 13909), False, 'import msgpack\n'), ((13996, 14068), 'graphscope.framework.dag_utils.report_graph', 'dag_utils.report_graph', (['self._graph', 'types_pb2.SUCC_ATTR_BY_GID'], {'gid': 'gid'}), '(self._graph, types_pb2.SUCC_ATTR_BY_GID, gid=gid)\n', (14018, 14068), False, 'from graphscope.framework import dag_utils\n'), ((14203, 14235), 'msgpack.load', 'msgpack.load', (['fp'], {'use_list': '(False)'}), '(fp, use_list=False)\n', (14215, 14235), False, 'import msgpack\n'), ((14327, 14399), 'graphscope.framework.dag_utils.report_graph', 'dag_utils.report_graph', (['self._graph', 'types_pb2.PRED_ATTR_BY_GID'], {'gid': 'gid'}), '(self._graph, types_pb2.PRED_ATTR_BY_GID, gid=gid)\n', (14349, 14399), False, 'from graphscope.framework import dag_utils\n'), ((14534, 14566), 'msgpack.load', 'msgpack.load', (['fp'], {'use_list': '(False)'}), '(fp, use_list=False)\n', (14546, 14566), False, 'import msgpack\n'), ((15249, 15266), 'simdjson.dumps', 'simdjson.dumps', (['n'], {}), '(n)\n', (15263, 15266), False, 'import simdjson\n'), ((16095, 16112), 'simdjson.dumps', 'simdjson.dumps', (['n'], {}), '(n)\n', 
(16109, 16112), False, 'import simdjson\n'), ((16983, 17000), 'simdjson.dumps', 'simdjson.dumps', (['n'], {}), '(n)\n', (16997, 17000), False, 'import simdjson\n')]
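The Cache class above layers an iteration batch cache, prefetched on a single-worker thread pool, on top of functools.lru_cache for point lookups. A minimal sketch of that combination, with hypothetical names and a toy fetch callable standing in for GraphScope's report_graph operations:

import concurrent.futures
from functools import lru_cache

class TinyCache:
    def __init__(self, fetch_batch):
        # fetch_batch: callable taking a start cursor, returning (next_cursor, items)
        self._fetch_batch = fetch_batch
        self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
        self._future = self._executor.submit(self._fetch_batch, 0)

    @lru_cache(maxsize=1024)
    def get_item(self, key):
        # cache-miss path: the real class would issue a report op here
        return ("item", key)

    def next_batch(self):
        # iteration path: consume the prefetched batch, then prefetch the next one
        next_cursor, items = self._future.result()
        self._future = self._executor.submit(self._fetch_batch, next_cursor)
        return items

batches = {0: (3, [0, 1, 2]), 3: (6, [3, 4, 5]), 6: (6, [])}
cache = TinyCache(lambda cursor: batches[cursor])
assert cache.next_batch() == [0, 1, 2]
assert cache.next_batch() == [3, 4, 5]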
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from graphscope.framework.app import AppAssets
from graphscope.framework.app import not_compatible_for
from graphscope.framework.app import project_to_simple

__all__ = [
    "sssp",
]


@project_to_simple
@not_compatible_for("arrow_property", "dynamic_property")
def sssp(graph, src=0, weight=None):
    """Compute single source shortest path length on the `graph`.

    Note that the `sssp` algorithm requires a numerical property on the edge.

    Args:
        graph (:class:`graphscope.Graph`): A simple graph.
        src (optional): The source vertex. The type should be consistent with
            the id type of the `graph`, that is, it's `int` or `str` depending
            on whether the `oid_type` of the `graph` is `int64_t` or `string`.
            Defaults to 0.
        weight (str, optional): The edge data key corresponding to the edge weight.
            Note that property under multiple labels should have the consistent index.
            Defaults to None.

    Returns:
        :class:`graphscope.framework.context.VertexDataContextDAGNode`:
            A context with each vertex assigned with the shortest distance from
            the `src`, evaluated in eager mode.

    Examples:

    .. code:: python

        >>> import graphscope
        >>> from graphscope.dataset import load_p2p_network
        >>> sess = graphscope.session(cluster_type="hosts", mode="eager")
        >>> g = load_p2p_network(sess)
        >>> # project to a simple graph (if needed)
        >>> pg = g.project(vertices={"host": ["id"]}, edges={"connect": ["dist"]})
        >>> c = graphscope.sssp(pg, src=6)
        >>> sess.close()
    """
    return AppAssets(algo="sssp", context="vertex_data")(graph, src)
[ "graphscope.framework.app.not_compatible_for", "graphscope.framework.app.AppAssets" ]
[((874, 930), 'graphscope.framework.app.not_compatible_for', 'not_compatible_for', (['"""arrow_property"""', '"""dynamic_property"""'], {}), "('arrow_property', 'dynamic_property')\n", (892, 930), False, 'from graphscope.framework.app import not_compatible_for\n'), ((2305, 2350), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp"""', 'context': '"""vertex_data"""'}), "(algo='sssp', context='vertex_data')\n", (2314, 2350), False, 'from graphscope.framework.app import AppAssets\n')]
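The sssp record above is a thin wrapper: decorators project the graph to a simple graph and reject incompatible graph kinds before AppAssets dispatches the built-in algorithm. A hypothetical illustration of the guard-decorator idea follows; it is not GraphScope's actual not_compatible_for implementation, only a sketch of the pattern.

import functools

def reject_kinds(*bad_kinds):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(graph, *args, **kwargs):
            # refuse to run on graph kinds the wrapped algorithm cannot handle
            if getattr(graph, "kind", None) in bad_kinds:
                raise RuntimeError(
                    f"{func.__name__} does not support {graph.kind} graphs"
                )
            return func(graph, *args, **kwargs)
        return wrapper
    return decorator

@reject_kinds("dynamic_property")
def toy_sssp(graph, src=0):
    # placeholder body; a real app would dispatch to an engine here
    return {src: 0}

class SimpleGraph:
    kind = "arrow_projected"

assert toy_sssp(SimpleGraph()) == {0: 0}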
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is referred and derived from project NetworkX
#
# which has the following license:
#
#    Copyright (C) 2004-2020, NetworkX Developers
#    <NAME> <<EMAIL>>
#    <NAME> <<EMAIL>>
#    <NAME> <<EMAIL>>
#    All rights reserved.
#
#    This file is part of NetworkX.
#
#    NetworkX is distributed under a BSD license; see LICENSE.txt for more
#    information.
#
import os

import pytest
from networkx.algorithms.isomorphism.isomorph import is_isomorphic
from networkx.generators.tests.test_harary_graph import TestHararyGraph

import graphscope.nx as nx
from graphscope.nx.generators.harary_graph import hkn_harary_graph
from graphscope.nx.generators.harary_graph import hnm_harary_graph
from graphscope.nx.utils.compat import with_graphscope_nx_context


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestHararyGraph)
class TestHararyGraph:
    def test_hkn_harary_graph(self):
        # When k == 1, the hkn_harary_graph(k,n) is
        # the path_graph(n)
        for (k, n) in [(1, 6), (1, 7)]:
            G1 = hkn_harary_graph(k, n)
            G2 = nx.path_graph(n)
            assert is_isomorphic(G1, G2)

        # When k is even, the hkn_harary_graph(k,n) is
        # the circulant_graph(n, list(range(1,k/2+1)))
        for (k, n) in [(2, 6), (2, 7), (4, 6), (4, 7)]:
            G1 = hkn_harary_graph(k, n)
            G2 = nx.circulant_graph(n, list(range(1, k // 2 + 1)))
            assert is_isomorphic(G1, G2)

        # When k is odd and n is even, the hkn_harary_graph(k,n) is
        # the circulant_graph(n, list(range(1,(k+1)/2)) plus [n/2])
        for (k, n) in [(3, 6), (5, 8), (7, 10)]:
            G1 = hkn_harary_graph(k, n)
            L = list(range(1, (k + 1) // 2))
            L.append(n // 2)
            G2 = nx.circulant_graph(n, L)
            assert is_isomorphic(G1, G2)

        # When k is odd and n is odd, the hkn_harary_graph(k,n) is
        # the circulant_graph(n, list(range(1,(k+1)/2))) with
        # n//2+1 edges added between node i and node i+n//2+1
        for (k, n) in [(3, 5), (5, 9), (7, 11)]:
            G1 = hkn_harary_graph(k, n)
            G2 = nx.circulant_graph(n, list(range(1, (k + 1) // 2)))
            eSet1 = set(G1.edges)
            eSet2 = set(G2.edges)
            eSet3 = set()
            half = n // 2
            for i in range(0, half + 1):
                # add half+1 edges between i and i+half
                eSet3.add((i, (i + half) % n))
            if os.environ.get("DEPLOYMENT", None) != "standalone" and k == 7:
                eSet1.remove((8, 3))
                eSet1.remove((6, 1))
                eSet1.remove((10, 5))
                eSet1.add((3, 8))
                eSet1.add((1, 6))
                eSet1.add((5, 10))
            assert eSet1 == eSet2 | eSet3

        # Raise NetworkXError if k<1
        k = 0
        n = 0
        pytest.raises(nx.NetworkXError, hkn_harary_graph, k, n)

        # Raise NetworkXError if n<k+1
        k = 6
        n = 6
        pytest.raises(nx.NetworkXError, hkn_harary_graph, k, n)
[ "graphscope.nx.circulant_graph", "os.environ.get", "graphscope.nx.utils.compat.with_graphscope_nx_context", "networkx.algorithms.isomorphism.isomorph.is_isomorphic", "pytest.raises", "pytest.mark.usefixtures", "graphscope.nx.path_graph", "graphscope.nx.generators.harary_graph.hkn_harary_graph" ]
[((795, 840), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (818, 840), False, 'import pytest\n'), ((842, 885), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestHararyGraph'], {}), '(TestHararyGraph)\n', (868, 885), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((2902, 2957), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'hkn_harary_graph', 'k', 'n'], {}), '(nx.NetworkXError, hkn_harary_graph, k, n)\n', (2915, 2957), False, 'import pytest\n'), ((3034, 3089), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'hkn_harary_graph', 'k', 'n'], {}), '(nx.NetworkXError, hkn_harary_graph, k, n)\n', (3047, 3089), False, 'import pytest\n'), ((1083, 1105), 'graphscope.nx.generators.harary_graph.hkn_harary_graph', 'hkn_harary_graph', (['k', 'n'], {}), '(k, n)\n', (1099, 1105), False, 'from graphscope.nx.generators.harary_graph import hkn_harary_graph\n'), ((1123, 1139), 'graphscope.nx.path_graph', 'nx.path_graph', (['n'], {}), '(n)\n', (1136, 1139), True, 'import graphscope.nx as nx\n'), ((1159, 1180), 'networkx.algorithms.isomorphism.isomorph.is_isomorphic', 'is_isomorphic', (['G1', 'G2'], {}), '(G1, G2)\n', (1172, 1180), False, 'from networkx.algorithms.isomorphism.isomorph import is_isomorphic\n'), ((1365, 1387), 'graphscope.nx.generators.harary_graph.hkn_harary_graph', 'hkn_harary_graph', (['k', 'n'], {}), '(k, n)\n', (1381, 1387), False, 'from graphscope.nx.generators.harary_graph import hkn_harary_graph\n'), ((1474, 1495), 'networkx.algorithms.isomorphism.isomorph.is_isomorphic', 'is_isomorphic', (['G1', 'G2'], {}), '(G1, G2)\n', (1487, 1495), False, 'from networkx.algorithms.isomorphism.isomorph import is_isomorphic\n'), ((1699, 1721), 'graphscope.nx.generators.harary_graph.hkn_harary_graph', 'hkn_harary_graph', (['k', 'n'], {}), '(k, n)\n', (1715, 1721), False, 'from graphscope.nx.generators.harary_graph import hkn_harary_graph\n'), ((1813, 1837), 'graphscope.nx.circulant_graph', 'nx.circulant_graph', (['n', 'L'], {}), '(n, L)\n', (1831, 1837), True, 'import graphscope.nx as nx\n'), ((1857, 1878), 'networkx.algorithms.isomorphism.isomorph.is_isomorphic', 'is_isomorphic', (['G1', 'G2'], {}), '(G1, G2)\n', (1870, 1878), False, 'from networkx.algorithms.isomorphism.isomorph import is_isomorphic\n'), ((2137, 2159), 'graphscope.nx.generators.harary_graph.hkn_harary_graph', 'hkn_harary_graph', (['k', 'n'], {}), '(k, n)\n', (2153, 2159), False, 'from graphscope.nx.generators.harary_graph import hkn_harary_graph\n'), ((2508, 2542), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (2522, 2542), False, 'import os\n')]
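The equivalences asserted in the test above also hold for the upstream NetworkX generators, so they can be checked directly without a GraphScope session. For example, for even k, the Harary graph H(k, n) is the circulant graph on offsets 1..k/2; a small standalone check:

import networkx as nx
from networkx.generators.harary_graph import hkn_harary_graph

k, n = 4, 7
G1 = hkn_harary_graph(k, n)
G2 = nx.circulant_graph(n, list(range(1, k // 2 + 1)))
assert nx.is_isomorphic(G1, G2)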
import pytest
from networkx.algorithms.tests.test_chordal import TestMCS

import graphscope.nx as nx
from graphscope.nx.utils.compat import with_graphscope_nx_context


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestMCS)
class TestMCS:
    # NB: graphscope.nx does not support grid_graph, remove grid_graph
    def test_complete_to_chordal_graph(self):
        fgrg = nx.fast_gnp_random_graph
        test_graphs = [
            nx.barbell_graph(6, 2),
            nx.cycle_graph(15),
            nx.wheel_graph(20),
            nx.ladder_graph(15),
            nx.star_graph(5),
            nx.bull_graph(),
            fgrg(20, 0.3, seed=1),
        ]
        for G in test_graphs:
            H, a = nx.complete_to_chordal_graph(G)
            assert nx.is_chordal(H)
            assert len(a) == H.number_of_nodes()
            if nx.is_chordal(G):
                assert G.number_of_edges() == H.number_of_edges()
                assert set(a.values()) == {0}
            else:
                assert len(set(a.values())) == H.number_of_nodes()
[ "graphscope.nx.cycle_graph", "graphscope.nx.bull_graph", "graphscope.nx.is_chordal", "graphscope.nx.wheel_graph", "graphscope.nx.ladder_graph", "graphscope.nx.star_graph", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures", "graphscope.nx.complete_to_chordal_graph", "graphscope.nx.barbell_graph" ]
[((170, 215), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (193, 215), False, 'import pytest\n'), ((217, 252), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestMCS'], {}), '(TestMCS)\n', (243, 252), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((461, 483), 'graphscope.nx.barbell_graph', 'nx.barbell_graph', (['(6)', '(2)'], {}), '(6, 2)\n', (477, 483), True, 'import graphscope.nx as nx\n'), ((497, 515), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(15)'], {}), '(15)\n', (511, 515), True, 'import graphscope.nx as nx\n'), ((529, 547), 'graphscope.nx.wheel_graph', 'nx.wheel_graph', (['(20)'], {}), '(20)\n', (543, 547), True, 'import graphscope.nx as nx\n'), ((561, 580), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['(15)'], {}), '(15)\n', (576, 580), True, 'import graphscope.nx as nx\n'), ((594, 610), 'graphscope.nx.star_graph', 'nx.star_graph', (['(5)'], {}), '(5)\n', (607, 610), True, 'import graphscope.nx as nx\n'), ((624, 639), 'graphscope.nx.bull_graph', 'nx.bull_graph', ([], {}), '()\n', (637, 639), True, 'import graphscope.nx as nx\n'), ((735, 766), 'graphscope.nx.complete_to_chordal_graph', 'nx.complete_to_chordal_graph', (['G'], {}), '(G)\n', (763, 766), True, 'import graphscope.nx as nx\n'), ((786, 802), 'graphscope.nx.is_chordal', 'nx.is_chordal', (['H'], {}), '(H)\n', (799, 802), True, 'import graphscope.nx as nx\n'), ((867, 883), 'graphscope.nx.is_chordal', 'nx.is_chordal', (['G'], {}), '(G)\n', (880, 883), True, 'import graphscope.nx as nx\n')]
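The API exercised above can also be tried in isolation with plain networkx, which graphscope.nx mirrors here: complete_to_chordal_graph returns a chordal supergraph together with an elimination ordering alpha. A minimal illustrative check:

import networkx as nx

G = nx.cycle_graph(15)
H, alpha = nx.complete_to_chordal_graph(G)
assert nx.is_chordal(H)
assert len(alpha) == H.number_of_nodes()
assert G.number_of_edges() <= H.number_of_edges()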
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import pytest import graphscope import graphscope.nx as nx from graphscope.client.session import g from graphscope.framework.errors import AnalyticalEngineInternalError from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.loader import Loader from graphscope.proto import types_pb2 @pytest.fixture(scope="session") def graphscope_session(): graphscope.set_option(show_log=True) graphscope.set_option(initializing_interactive_engine=False) sess = graphscope.session(cluster_type="hosts", num_workers=1) sess.as_default() yield sess sess.close() def ldbc_sample_single_label(prefix, directed): graph = graphscope.g(directed=directed) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_single_label_with_sess(sess, prefix, directed): graph = sess.g(directed=directed) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_multi_labels(prefix, directed): graph = graphscope.g(directed=directed) graph = ( graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) .add_vertices( Loader(os.path.join(prefix, "person_0_0.csv"), delimiter="|"), "person" ) .add_vertices( Loader(os.path.join(prefix, "post_0_0.csv"), delimiter="|"), "post", ) ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", src_label="comment", dst_label="comment", ).add_edges( Loader(os.path.join(prefix, "person_knows_person_0_0.csv"), delimiter="|"), "knows", ["creationDate"], src_label="person", dst_label="person", ) return graph def ldbc_sample_with_duplicated_oid(prefix, directed): graph = graphscope.g(directed=directed) graph = graph.add_vertices( Loader(os.path.join(prefix, "place_0_0.csv"), delimiter="|"), "place" ).add_vertices( Loader(os.path.join(prefix, "person_0_0.csv"), delimiter="|"), "person" ) graph = graph.add_edges( Loader(os.path.join(prefix, "place_isPartOf_place_0_0.csv"), delimiter="|"), "isPartOf", src_label="place", dst_label="place", ).add_edges( Loader(os.path.join(prefix, "person_knows_person_0_0.csv"), delimiter="|"), "knows", ["creationDate"], src_label="person", dst_label="person", ) return graph @pytest.mark.usefixtures("graphscope_session") class TestGraphTransformation(object): @classmethod def setup_class(cls): cls.NXGraph = nx.Graph cls.data_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample") cls.single_label_g = ldbc_sample_single_label(cls.data_dir, False) cls.multi_label_g = ldbc_sample_multi_labels(cls.data_dir, False) 
cls.duplicated_oid_g = ldbc_sample_with_duplicated_oid(cls.data_dir, False) # FIXME: this is tricky way to create a str gs graph les_g = nx.les_miserables_graph() cls.str_oid_g = g(les_g) @classmethod def teardown_class(cls): cls.single_label_g.unload() cls.multi_label_g.unload() cls.duplicated_oid_g.unload() def assert_convert_success(self, gs_g, nx_g): assert gs_g.is_directed() == nx_g.is_directed() assert self._schema_equal(gs_g.schema, nx_g.schema) def _schema_equal(self, gs_schema, nx_schema): v_props = {} for entry in gs_schema._valid_vertex_entries(): for prop in entry.properties: v_props[prop.name] = prop.type e_props = {} for entry in gs_schema._valid_edge_entries(): for prop in entry.properties: e_props[prop.name] = prop.type gs_v_props = { prop.name: prop.type for prop in list(nx_schema._valid_vertex_entries())[0].properties } gs_e_props = { prop.name: prop.type for prop in list(nx_schema._valid_edge_entries())[0].properties } return v_props == gs_v_props and e_props == gs_e_props # nx to gs def test_empty_nx_to_gs(self): empty_nx_g = self.NXGraph(dist=True) gs_g = g(empty_nx_g) self.assert_convert_success(gs_g, empty_nx_g) def test_only_contains_nodes_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(100), type="node") gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_simple_nx_to_gs(self): nx_g = nx.complete_graph(10, create_using=self.NXGraph) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_int_node_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(10), foo="star") nx_g.add_edges_from( [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8), (8, 9)], weight=3.14, ) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_str_node_nx_to_gs(self): nx_g = nx.les_miserables_graph() gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_complete_nx_to_gs(self): # multi-propery, node propery and edge propty both aligned nodes = [ (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}), ] edges = [ (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), ] nx_g = self.NXGraph(dist=True) nx_g.update(edges, nodes) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # node property aliged, edge not aliged nx_g2 = nx_g.copy() nx_g2.add_edge(0, 1, ep4="new propery") gs_g2 = g(nx_g2) self.assert_convert_success(gs_g2, nx_g2) # edge property aliged, node not aliged nx_g3 = nx_g.copy() nx_g3.add_node(2, vp4="new propery") gs_g3 = g(nx_g3) self.assert_convert_success(gs_g3, nx_g3) # both not aliged nx_g4 = nx_g.copy() nx_g4.add_edge(0, 1, ep4="new propery") nx_g4.add_node(2, vp4="new propery") gs_g4 = g(nx_g4) self.assert_convert_success(gs_g4, nx_g4) def test_nx_to_gs_after_modify(self): nx_g = self.NXGraph(dist=True) nodes = [ (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}), ] # add nodes nx_g.add_nodes_from(nodes) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # add_edges edges = [ (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), ] nx_g.add_edges_from(edges) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # remove edge nx_g.remove_edge(0, 1) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # remove node nx_g.remove_node(0) gs_g = g(nx_g) 
self.assert_convert_success(gs_g, nx_g) # clear nx_g.clear() gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_nx_to_gs_remove_nodes(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(10)) # all nodes are int gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # success nx_g.add_node("str_node") # add a str node with pytest.raises( RuntimeError, match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph", ): gs_g = g(nx_g) # mixing oid type, failed nx_g.remove_node("str_node") # remove str node, all nodes are int again gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # success def test_error_on_view_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g._graph = None # graph view always has a _graph attribute with pytest.raises(TypeError, match="graph view can not convert to gs graph"): gs_g = g(nx_g) def test_error_on_mixing_node_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_node(0, weight=1.23) nx_g.add_node("zakky", foo="node") with pytest.raises( RuntimeError, match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph", ): gs_g = g(nx_g) # gs to nx def test_empty_gs_to_nx(self): empty_nx = self.NXGraph(dist=True) empty_gs_graph = g(empty_nx) nx_g = self.NXGraph(empty_gs_graph, dist=True) self.assert_convert_success(empty_gs_graph, nx_g) def test_single_label_gs_to_nx(self): g = self.single_label_g nx_g = self.NXGraph(g, dist=True) self.assert_convert_success(g, nx_g) def test_multi_label_gs_to_nx(self): g = self.multi_label_g nx_g = self.NXGraph(g, dist=True) self.assert_convert_success(g, nx_g) def test_str_oid_gs_to_nx(self): g = self.str_oid_g nx_g = self.NXGraph(g, dist=True) self.assert_convert_success(g, nx_g) def test_error_on_wrong_nx_type(self): g = self.single_label_g with pytest.raises(TypeError): nx_g = nx.DiGraph(g) def test_error_on_duplicate_oid(self): g = self.duplicated_oid_g with pytest.raises(AnalyticalEngineInternalError): nx_g = self.NXGraph(g) @pytest.mark.skip(reason="TODO: open it") def test_multiple_sessions(self): g = self.single_label_g sess2 = graphscope.session(cluster_type="hosts", num_workers=1) g2 = ldbc_sample_single_label_with_sess(sess2, self.data_dir, False) assert g.session_id != g2.session_id nx_g = self.NXGraph(g) nx_g2 = self.NXGraph(g2) self.assert_convert_success(g2, nx_g2) assert nx_g.session_id == g.session_id assert nx_g2.session_id == g2.session_id # copies cg1 = nx_g2.copy() assert cg1.session_id == nx_g2.session_id dg1 = nx_g2.to_directed() assert dg1.session_id == nx_g2.session_id dg2 = nx_g2.to_directed(as_view=True) assert dg2.session_id == nx_g2.session_id # subgraph sg1 = nx_g2.subgraph([274877907301, 274877907299]) assert sg1.session_id == nx_g2.session_id sg2 = nx_g2.edge_subgraph([(274877907301, 274877907299)]) assert sg2.session_id == nx_g2.session_id sess2.close() @pytest.mark.usefixtures("graphscope_session") class TestGraphProjectTest(object): @classmethod def setup_class(cls): cls.NXGraph = nx.Graph edgelist = os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist") cls.g = nx.read_edgelist( edgelist, nodetype=int, data=True, create_using=cls.NXGraph ) cls.g.add_node(1, vdata_str="kdjfao") cls.g.add_node(1, vdata_int=123) def test_project_to_simple(self): # default, e_prop='', v_prop='' sg1 = self.g._project_to_simple() assert ( sg1.schema.vdata_type == types_pb2.NULLVALUE and sg1.schema.edata_type == types_pb2.NULLVALUE ) # to_simple with e_prop sg2 = 
self.g._project_to_simple(e_prop="edata_float") assert ( sg2.schema.vdata_type == types_pb2.NULLVALUE and sg2.schema.edata_type == types_pb2.DOUBLE ) # to_simple with v_prop sg3 = self.g._project_to_simple(v_prop="vdata_str") assert ( sg3.schema.vdata_type == types_pb2.STRING and sg3.schema.edata_type == types_pb2.NULLVALUE ) # to_simple with e_prop and v_prop sg4 = self.g._project_to_simple(v_prop="vdata_int", e_prop="edata_str") assert ( sg4.schema.vdata_type == types_pb2.INT64 and sg4.schema.edata_type == types_pb2.STRING ) # empty graph to simple empty_g = self.NXGraph() sg5 = empty_g._project_to_simple() assert ( sg5.schema.vdata_type == types_pb2.NULLVALUE and sg5.schema.edata_type == types_pb2.NULLVALUE ) with pytest.raises( InvalidArgumentError, match="graph not contains the vertex property foo" ): sg6 = empty_g._project_to_simple(v_prop="foo") @pytest.mark.skip(reason="It use much memory, exceeds the limit of Github runner") def test_implicit_project_to_simple(self): g = self.g nx.builtin.degree_centrality(g) nx.builtin.single_source_dijkstra_path_length(g, source=6, weight="weight") def test_error_on_not_exist_vertex_property(self): g = self.NXGraph() g.add_node(0, foo="node") with pytest.raises( InvalidArgumentError, match="graph not contains the vertex property weight" ): sg = g._project_to_simple(v_prop="weight") def test_error_on_not_exist_edge_property(self): g = self.NXGraph() g.add_edge(0, 1, weight=3) with pytest.raises( InvalidArgumentError, match="graph not contains the edge property type" ): sg = g._project_to_simple(e_prop="type") @pytest.mark.skip(reason="FIXME: engine can not catch the app throw error now") def test_error_on_some_edges_not_contain_property(self): g = self.g # some edges not contain the property with pytest.raises(RuntimeError): nx.builtin.single_source_dijkstra_path_length( g, source=6, weight="edata_random_int_0" ) @pytest.mark.skip(reason="FIXME: engine can not catch the app throw error now") def test_error_on_some_edges_has_wrong_type(self): g = self.g.copy() # set edge a wrong type g[6][42]["weight"] = "a str" with pytest.raises(RuntimeError): nx.builtin.single_source_dijkstra_path_length(g, source=6, weight="weight") @pytest.mark.skip(reason="find a algorithm that use vertex data") def test_error_on_some_nodes_not_contain_property(self): g = self.g with pytest.raises(RuntimeError): nx.builtin.sssp(weight="vdata_random_int_0") @pytest.mark.skip(reason="find a algorithm that use vertex data") def test_error_on_some_nodes_has_wrong_type(self): g = self.g.copy() g[0]["weight"] = "a str" with pytest.raises(RuntimeError): nx.builtin.sssp(weight="weight") @pytest.mark.usefixtures("graphscope_session") class TestDigraphTransformation(TestGraphTransformation): @classmethod def setup_class(cls): cls.NXGraph = nx.DiGraph data_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample") cls.single_label_g = ldbc_sample_single_label(data_dir, True) cls.multi_label_g = ldbc_sample_multi_labels(data_dir, True) cls.duplicated_oid_g = ldbc_sample_with_duplicated_oid(data_dir, True) # FIXME: this is tricky way to create a str gs graph les_g = nx.les_miserables_graph() di_les_g = nx.DiGraph() di_les_g.add_edges_from(di_les_g.edges.data()) cls.str_oid_g = g(di_les_g) @classmethod def teardown_class(cls): cls.single_label_g.unload() cls.multi_label_g.unload() cls.duplicated_oid_g.unload() def test_error_on_wrong_nx_type(self): g = self.single_label_g with pytest.raises(TypeError): nx_g = nx.Graph(g) @pytest.mark.usefixtures("graphscope_session") class 
TestDiGraphProjectTest(TestGraphProjectTest): @classmethod def setup_class(cls): cls.NXGraph = nx.DiGraph edgelist = os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist") cls.g = nx.read_edgelist( edgelist, nodetype=int, data=True, create_using=cls.NXGraph ) cls.g.add_node(0, vdata_str="kdjfao") cls.g.add_node(1, vdata_int=123)
[ "graphscope.nx.complete_graph", "pytest.fixture", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.read_edgelist", "graphscope.session", "graphscope.client.session.g.add_edge", "pytest.mark.usefixtures", "graphscope.client.session.g._project_to_simple", "graphscope.nx.les_miserables_graph", "graphscope.g", "graphscope.client.session.g", "pytest.mark.skip", "graphscope.client.session.g.add_node", "graphscope.nx.DiGraph", "graphscope.set_option", "pytest.raises", "graphscope.nx.builtin.degree_centrality", "os.path.expandvars", "os.path.join", "graphscope.nx.builtin.sssp", "graphscope.nx.Graph" ]
[((998, 1029), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1012, 1029), False, 'import pytest\n'), ((3669, 3714), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (3692, 3714), False, 'import pytest\n'), ((12162, 12207), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (12185, 12207), False, 'import pytest\n'), ((16188, 16233), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (16211, 16233), False, 'import pytest\n'), ((17186, 17231), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (17209, 17231), False, 'import pytest\n'), ((1060, 1096), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (1081, 1096), False, 'import graphscope\n'), ((1101, 1161), 'graphscope.set_option', 'graphscope.set_option', ([], {'initializing_interactive_engine': '(False)'}), '(initializing_interactive_engine=False)\n', (1122, 1161), False, 'import graphscope\n'), ((1174, 1229), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (1192, 1229), False, 'import graphscope\n'), ((1346, 1377), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed'}), '(directed=directed)\n', (1358, 1377), False, 'import graphscope\n'), ((2102, 2133), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed'}), '(directed=directed)\n', (2114, 2133), False, 'import graphscope\n'), ((3007, 3038), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed'}), '(directed=directed)\n', (3019, 3038), False, 'import graphscope\n'), ((11103, 11143), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""TODO: open it"""'}), "(reason='TODO: open it')\n", (11119, 11143), False, 'import pytest\n'), ((14052, 14138), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""It use much memory, exceeds the limit of Github runner"""'}), "(reason=\n 'It use much memory, exceeds the limit of Github runner')\n", (14068, 14138), False, 'import pytest\n'), ((14921, 14999), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: engine can not catch the app throw error now"""'}), "(reason='FIXME: engine can not catch the app throw error now')\n", (14937, 14999), False, 'import pytest\n'), ((15304, 15382), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: engine can not catch the app throw error now"""'}), "(reason='FIXME: engine can not catch the app throw error now')\n", (15320, 15382), False, 'import pytest\n'), ((15669, 15733), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""find a algorithm that use vertex data"""'}), "(reason='find a algorithm that use vertex data')\n", (15685, 15733), False, 'import pytest\n'), ((15919, 15983), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""find a algorithm that use vertex data"""'}), "(reason='find a algorithm that use vertex data')\n", (15935, 15983), False, 'import pytest\n'), ((3852, 3900), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/ldbc_sample"""'], {}), "('${GS_TEST_DIR}/ldbc_sample')\n", (3870, 3900), False, 'import os\n'), ((4212, 4237), 'graphscope.nx.les_miserables_graph', 'nx.les_miserables_graph', ([], {}), '()\n', (4235, 4237), True, 'import 
graphscope.nx as nx\n'), ((4262, 4270), 'graphscope.client.session.g', 'g', (['les_g'], {}), '(les_g)\n', (4263, 4270), False, 'from graphscope.client.session import g\n'), ((5436, 5449), 'graphscope.client.session.g', 'g', (['empty_nx_g'], {}), '(empty_nx_g)\n', (5437, 5449), False, 'from graphscope.client.session import g\n'), ((5661, 5668), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (5662, 5668), False, 'from graphscope.client.session import g\n'), ((5769, 5817), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(10)'], {'create_using': 'self.NXGraph'}), '(10, create_using=self.NXGraph)\n', (5786, 5817), True, 'import graphscope.nx as nx\n'), ((5833, 5840), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (5834, 5840), False, 'from graphscope.client.session import g\n'), ((6183, 6190), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6184, 6190), False, 'from graphscope.client.session import g\n'), ((6293, 6318), 'graphscope.nx.les_miserables_graph', 'nx.les_miserables_graph', ([], {}), '()\n', (6316, 6318), True, 'import graphscope.nx as nx\n'), ((6334, 6341), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6335, 6341), False, 'from graphscope.client.session import g\n'), ((6973, 6980), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (6974, 6980), False, 'from graphscope.client.session import g\n'), ((7170, 7178), 'graphscope.client.session.g', 'g', (['nx_g2'], {}), '(nx_g2)\n', (7171, 7178), False, 'from graphscope.client.session import g\n'), ((7367, 7375), 'graphscope.client.session.g', 'g', (['nx_g3'], {}), '(nx_g3)\n', (7368, 7375), False, 'from graphscope.client.session import g\n'), ((7590, 7598), 'graphscope.client.session.g', 'g', (['nx_g4'], {}), '(nx_g4)\n', (7591, 7598), False, 'from graphscope.client.session import g\n'), ((7991, 7998), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (7992, 7998), False, 'from graphscope.client.session import g\n'), ((8317, 8324), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8318, 8324), False, 'from graphscope.client.session import g\n'), ((8442, 8449), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8443, 8449), False, 'from graphscope.client.session import g\n'), ((8564, 8571), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8565, 8571), False, 'from graphscope.client.session import g\n'), ((8673, 8680), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8674, 8680), False, 'from graphscope.client.session import g\n'), ((8886, 8893), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (8887, 8893), False, 'from graphscope.client.session import g\n'), ((9343, 9350), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9344, 9350), False, 'from graphscope.client.session import g\n'), ((10178, 10189), 'graphscope.client.session.g', 'g', (['empty_nx'], {}), '(empty_nx)\n', (10179, 10189), False, 'from graphscope.client.session import g\n'), ((11230, 11285), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (11248, 11285), False, 'import graphscope\n'), ((12337, 12405), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (12355, 12405), False, 'import os\n'), ((12422, 12499), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['edgelist'], 
{'nodetype': 'int', 'data': '(True)', 'create_using': 'cls.NXGraph'}), '(edgelist, nodetype=int, data=True, create_using=cls.NXGraph)\n', (12438, 12499), True, 'import graphscope.nx as nx\n'), ((14208, 14239), 'graphscope.nx.builtin.degree_centrality', 'nx.builtin.degree_centrality', (['g'], {}), '(g)\n', (14236, 14239), True, 'import graphscope.nx as nx\n'), ((14248, 14323), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""weight"""'}), "(g, source=6, weight='weight')\n", (14293, 14323), True, 'import graphscope.nx as nx\n'), ((14415, 14440), 'graphscope.client.session.g.add_node', 'g.add_node', (['(0)'], {'foo': '"""node"""'}), "(0, foo='node')\n", (14425, 14440), False, 'from graphscope.client.session import g\n'), ((14712, 14738), 'graphscope.client.session.g.add_edge', 'g.add_edge', (['(0)', '(1)'], {'weight': '(3)'}), '(0, 1, weight=3)\n', (14722, 14738), False, 'from graphscope.client.session import g\n'), ((16387, 16435), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/ldbc_sample"""'], {}), "('${GS_TEST_DIR}/ldbc_sample')\n", (16405, 16435), False, 'import os\n'), ((16732, 16757), 'graphscope.nx.les_miserables_graph', 'nx.les_miserables_graph', ([], {}), '()\n', (16755, 16757), True, 'import graphscope.nx as nx\n'), ((16777, 16789), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (16787, 16789), True, 'import graphscope.nx as nx\n'), ((16869, 16880), 'graphscope.client.session.g', 'g', (['di_les_g'], {}), '(di_les_g)\n', (16870, 16880), False, 'from graphscope.client.session import g\n'), ((17379, 17447), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist')\n", (17397, 17447), False, 'import os\n'), ((17464, 17541), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['edgelist'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'cls.NXGraph'}), '(edgelist, nodetype=int, data=True, create_using=cls.NXGraph)\n', (17480, 17541), True, 'import graphscope.nx as nx\n'), ((1425, 1464), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (1437, 1464), False, 'import os\n'), ((1542, 1597), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (1554, 1597), False, 'import os\n'), ((1808, 1847), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (1820, 1847), False, 'import os\n'), ((1925, 1980), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (1937, 1980), False, 'import os\n'), ((2431, 2467), 'os.path.join', 'os.path.join', (['prefix', '"""post_0_0.csv"""'], {}), "(prefix, 'post_0_0.csv')\n", (2443, 2467), False, 'import os\n'), ((2747, 2798), 'os.path.join', 'os.path.join', (['prefix', '"""person_knows_person_0_0.csv"""'], {}), "(prefix, 'person_knows_person_0_0.csv')\n", (2759, 2798), False, 'import os\n'), ((3184, 3222), 'os.path.join', 'os.path.join', (['prefix', '"""person_0_0.csv"""'], {}), "(prefix, 'person_0_0.csv')\n", (3196, 3222), False, 'import os\n'), ((3475, 3526), 'os.path.join', 'os.path.join', (['prefix', '"""person_knows_person_0_0.csv"""'], {}), "(prefix, 'person_knows_person_0_0.csv')\n", (3487, 3526), False, 'import os\n'), ((9019, 9165), 
'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"""'}), '(RuntimeError, match=\n "The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"\n )\n', (9032, 9165), False, 'import pytest\n'), ((9211, 9218), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9212, 9218), False, 'from graphscope.client.session import g\n'), ((9574, 9646), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""graph view can not convert to gs graph"""'}), "(TypeError, match='graph view can not convert to gs graph')\n", (9587, 9646), False, 'import pytest\n'), ((9667, 9674), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (9668, 9674), False, 'from graphscope.client.session import g\n'), ((9859, 10005), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"""'}), '(RuntimeError, match=\n "The vertex type is not consistent <class \'int\'> vs <class \'str\'>, can not convert it to arrow graph"\n )\n', (9872, 10005), False, 'import pytest\n'), ((10051, 10058), 'graphscope.client.session.g', 'g', (['nx_g'], {}), '(nx_g)\n', (10052, 10058), False, 'from graphscope.client.session import g\n'), ((10866, 10890), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (10879, 10890), False, 'import pytest\n'), ((10911, 10924), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['g'], {}), '(g)\n', (10921, 10924), True, 'import graphscope.nx as nx\n'), ((11016, 11060), 'pytest.raises', 'pytest.raises', (['AnalyticalEngineInternalError'], {}), '(AnalyticalEngineInternalError)\n', (11029, 11060), False, 'import pytest\n'), ((13876, 13968), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the vertex property foo"""'}), "(InvalidArgumentError, match=\n 'graph not contains the vertex property foo')\n", (13889, 13968), False, 'import pytest\n'), ((14454, 14549), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the vertex property weight"""'}), "(InvalidArgumentError, match=\n 'graph not contains the vertex property weight')\n", (14467, 14549), False, 'import pytest\n'), ((14585, 14622), 'graphscope.client.session.g._project_to_simple', 'g._project_to_simple', ([], {'v_prop': '"""weight"""'}), "(v_prop='weight')\n", (14605, 14622), False, 'from graphscope.client.session import g\n'), ((14752, 14843), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""graph not contains the edge property type"""'}), "(InvalidArgumentError, match=\n 'graph not contains the edge property type')\n", (14765, 14843), False, 'import pytest\n'), ((14879, 14914), 'graphscope.client.session.g._project_to_simple', 'g._project_to_simple', ([], {'e_prop': '"""type"""'}), "(e_prop='type')\n", (14899, 14914), False, 'from graphscope.client.session import g\n'), ((15139, 15166), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (15152, 15166), False, 'import pytest\n'), ((15180, 15272), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""edata_random_int_0"""'}), "(g, source=6, weight=\n 'edata_random_int_0')\n", (15225, 15272), True, 'import graphscope.nx as nx\n'), ((15546, 15573), 'pytest.raises', 
'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (15559, 15573), False, 'import pytest\n'), ((15587, 15662), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['g'], {'source': '(6)', 'weight': '"""weight"""'}), "(g, source=6, weight='weight')\n", (15632, 15662), True, 'import graphscope.nx as nx\n'), ((15827, 15854), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (15840, 15854), False, 'import pytest\n'), ((15868, 15912), 'graphscope.nx.builtin.sssp', 'nx.builtin.sssp', ([], {'weight': '"""vdata_random_int_0"""'}), "(weight='vdata_random_int_0')\n", (15883, 15912), True, 'import graphscope.nx as nx\n'), ((16111, 16138), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (16124, 16138), False, 'import pytest\n'), ((16152, 16184), 'graphscope.nx.builtin.sssp', 'nx.builtin.sssp', ([], {'weight': '"""weight"""'}), "(weight='weight')\n", (16167, 16184), True, 'import graphscope.nx as nx\n'), ((17126, 17150), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (17139, 17150), False, 'import pytest\n'), ((17171, 17182), 'graphscope.nx.Graph', 'nx.Graph', (['g'], {}), '(g)\n', (17179, 17182), True, 'import graphscope.nx as nx\n'), ((2314, 2352), 'os.path.join', 'os.path.join', (['prefix', '"""person_0_0.csv"""'], {}), "(prefix, 'person_0_0.csv')\n", (2326, 2352), False, 'import os\n'), ((2565, 2620), 'os.path.join', 'os.path.join', (['prefix', '"""comment_replyOf_comment_0_0.csv"""'], {}), "(prefix, 'comment_replyOf_comment_0_0.csv')\n", (2577, 2620), False, 'import os\n'), ((3086, 3123), 'os.path.join', 'os.path.join', (['prefix', '"""place_0_0.csv"""'], {}), "(prefix, 'place_0_0.csv')\n", (3098, 3123), False, 'import os\n'), ((3299, 3351), 'os.path.join', 'os.path.join', (['prefix', '"""place_isPartOf_place_0_0.csv"""'], {}), "(prefix, 'place_isPartOf_place_0_0.csv')\n", (3311, 3351), False, 'import os\n'), ((2195, 2234), 'os.path.join', 'os.path.join', (['prefix', '"""comment_0_0.csv"""'], {}), "(prefix, 'comment_0_0.csv')\n", (2207, 2234), False, 'import os\n')]
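
The extract_api fields in these rows are plain Python literals, so they can be loaded back without a custom parser. Below is a minimal sketch; the eight-field shape (call span, qualified API name, callee text, (args, kwargs), rendered call, callee-name span, a flag, and the originating import) is inferred from the rows themselves, not from any published schema.

import ast

# One entry copied from a row in this dump; the raw string keeps the escaped
# newlines inside the rendered-call and import strings intact for literal_eval.
row = r"[((1153, 1204), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (1165, 1204), False, 'import graphscope\n')]"

for span, api, callee, (args, kwargs), rendered, name_span, flag, import_stmt in ast.literal_eval(row):
    print(api, span, import_stmt.strip())
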
import pytest from networkx.generators.tests.test_internet_as_graphs import TestInternetASTopology import graphscope.nx as nx from graphscope.nx import is_directed from graphscope.nx import neighbors from graphscope.nx.generators.internet_as_graphs import random_internet_as_graph from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestInternetASTopology) class TestInternetASTopology: pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((351, 396), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (374, 396), False, 'import pytest\n'), ((398, 448), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestInternetASTopology'], {}), '(TestInternetASTopology)\n', (424, 448), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
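
The row above is the forwarding pattern used throughout these test rows: an upstream NetworkX test class is re-run against graphscope.nx by decoration. A minimal sketch of the same pattern follows; the upstream suite chosen here (the dominance tests imported elsewhere in this dump) is only illustrative, and a working graphscope_session fixture is assumed.

import pytest
from networkx.algorithms.tests.test_dominance import TestImmediateDominators as _Upstream

from graphscope.nx.utils.compat import with_graphscope_nx_context


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(_Upstream)
class TestImmediateDominatorsOnGS:
    # The decorator copies every test from the upstream class onto this one and
    # rebinds networkx references to graphscope.nx; the session fixture supplies
    # the running engine. Individual cases can be overridden or skipped here.
    pass
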
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Classes and functions used to construct dags. """ import hashlib import uuid from google.protobuf.json_format import MessageToJson from graphscope.proto import op_def_pb2 class Operation(object): """Represents a dag op that performs computation on tensors. For example :code:`g2 = g1.add_vertices("path")` create an :code:`Operation` of type "ADD_LABELS" that takes operation of :code:`g1` as input, and produces a graph dag node :code:`g2` which contains this operation as output. After the dag has been launched in a session, an `Operation` can be executed by :code`op.eval()` or passing it to :code:`session.run`. """ def __init__( self, session_id, op_type, inputs=None, output_types=None, config=None, query_args=None, ): """Creates an :code:`graphscope.framework.operation.Operation`. Args: op_type: :code:`types_pb2.OperationType` Value for the "op" attribute of the OpDef proto. inputs: A list of `Operations` that will be the parents to self output_types: The operation's output type config: Dictionary where the key is the attribute name (a string) and the value is the respective "attr" attribute of the OpDef proto (an AttrValue). query_args: Values that used as query parameters when evaluating app. Raises: TypeError: value in inputs is not a :class:`Operation` """ self._session_id = session_id self._op_def = op_def_pb2.OpDef( op=op_type, key=uuid.uuid4().hex, output_type=output_types ) self._parents = list() if config: for k, v in config.items(): self._op_def.attr[k].CopyFrom(v) if query_args is not None: self._op_def.query_args.CopyFrom(query_args) if inputs: for op in inputs: if not isinstance(op, Operation): raise TypeError("Input op must be an Operation: {0}".format(op)) self.add_parent(op) self._output_types = output_types self._evaluated = False self._leaf = False @property def key(self): """Unique key for each :code:`op_def_pb2.OpDef`""" return self._op_def.key @property def parents(self): """A list of :code:`graphscope.framework.operation.Operation`""" return self._parents @property def evaluated(self): return self._evaluated @evaluated.setter def evaluated(self, value): self._evaluated = bool(value) @property def type(self): return self._op_def.op @property def output_types(self): return self._output_types @property def signature(self): """Signature of its parents' signatures and its own parameters. Used to unique identify one `Operation` with fixed configuration, if the configuration changed, the signature will be changed accordingly. Note that this method has not been used. """ content = "" for op in self._parents: content += str(op.as_op_def) content += str(self.as_op_def()) return hashlib.sha224(content.encode()).hexdigest() def is_leaf_op(self): return self._leaf def eval(self, leaf=True): """Evaluate by :code:`sess.run`. Args: leaf (bool, optional): Leaf Operation means there is no successor. 
""" # NB: to void cycle import # pylint: disable=import-outside-toplevel, cyclic-import from graphscope.client.session import get_session_by_id self._leaf = leaf sess = get_session_by_id(self._session_id) if not self._leaf: sess.dag.add_op(self) res = sess.run(self) return res def add_parent(self, op): self._parents.append(op) self._op_def.parents.extend([op.key]) def as_op_def(self): return self._op_def def __str__(self): return str(self.as_op_def()) def __repr__(self): return "<graphscope.framework.operation.Operation '%s'(%s)>" % ( self.type, self.key, ) def to_json(self): """Get json represented op.""" return MessageToJson(self._op_def)
[ "graphscope.client.session.get_session_by_id", "google.protobuf.json_format.MessageToJson", "uuid.uuid4" ]
[((4517, 4552), 'graphscope.client.session.get_session_by_id', 'get_session_by_id', (['self._session_id'], {}), '(self._session_id)\n', (4534, 4552), False, 'from graphscope.client.session import get_session_by_id\n'), ((5118, 5145), 'google.protobuf.json_format.MessageToJson', 'MessageToJson', (['self._op_def'], {}), '(self._op_def)\n', (5131, 5145), False, 'from google.protobuf.json_format import MessageToJson\n'), ((2376, 2388), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2386, 2388), False, 'import uuid\n')]
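
The Operation class above is normally driven by the session machinery, but it can be inspected standalone. A hedged sketch, assuming the module path graphscope.framework.operation (as the class repr suggests) and that types_pb2.RUN_APP is a valid operation code; eval() is left out because it resolves the session by id and needs a live one.

from graphscope.framework.operation import Operation
from graphscope.proto import types_pb2

# Build a dag op without a running session: the key is a uuid hex assigned at
# construction, and the underlying OpDef can be dumped as JSON for inspection.
op = Operation("fake-session-id", types_pb2.RUN_APP)  # hypothetical session id
print(op.key)        # unique key of the OpDef
print(op.to_json())  # JSON view of the op definition
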
# Copyright 2020 Alibaba Group Holding Limited. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """User can use this file to run self-defined java app locally.""" import argparse import logging import os import shutil import subprocess import sys import graphscope from graphscope import JavaApp from graphscope.dataset import load_p2p_network graphscope.set_option(show_log=True) POSSIBLE_APP_TYPES = [ "default_property", "parallel_property", "default_simple", "parallel_simple", ] JAVA_LONG = "java.lang.Long" JAVA_INT = "java.lang.Integer" JAVA_DOUBLE = "java.lang.Double" JAVA_FLOAT = "java.lang.Float" LOG_FORMAT = "[%(asctime)s]-[%(levelname)s]: %(message)s" logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) logger = logging.getLogger("java-app-runner") def parse_args(): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument( "--app", type=str, required=True, default="{}", help="The fully-specified name of your java app", ) parser.add_argument( "--jar_path", type=str, required=True, default="{}", help="The path where your packed jar resides.", ) parser.add_argument( "--arguments", type=str, default="{}", help="The params you want to pass to this app's context, format them like 'src=4,threadNum=1'", ) parser.add_argument( "--directed", type=bool, default=False, help="Run on directed graph or not" ) return parser.parse_args() def parse_java_app(java_app_class: str, java_jar_full_path: str): _java_app_type = "" _frag_param_str = "" _java_inner_context_type = "" _java_executable = "java" if shutil.which("java") is None: if os.environ.get("JAVA_HOME", None) is not None: _java_executable = os.path.join(os.environ.get("JAVA_HOME"), "bin", "java") if not os.path.isfile(_java_executable) or not os.access( _java_executable, os.X_OK ): raise RuntimeError( "Java executable not found, you shall install a java runtime." 
) parse_user_app_cmd = [ _java_executable, "-cp", "{}".format(java_jar_full_path), "com.alibaba.graphscope.utils.AppBaseParser", java_app_class, ] logger.info(" ".join(parse_user_app_cmd)) parse_user_app_process = subprocess.Popen( parse_user_app_cmd, env=os.environ.copy(), encoding="utf-8", errors="replace", stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, bufsize=1, ) out, err = parse_user_app_process.communicate() logger.info(err) for line in out.split("\n"): logger.info(line) if len(line) == 0: continue if line.find("DefaultPropertyApp") != -1: _java_app_type = "default_property" elif line.find("ParallelPropertyApp") != -1: _java_app_type = "parallel_property" elif line.find("DefaultAppBase") != -1: _java_app_type = "default_simple" elif line.find("ParallelAppBase") != -1: _java_app_type = "parallel_simple" elif line.find("Error") != -1: raise Exception("Error occured in verifying user app") elif line.find("TypeParams") != -1: _frag_param_str = line.split(":")[-1].strip() elif line.find("ContextType") != -1: _java_inner_context_type = line.split(":")[-1].strip() logger.info( "Java app type: {}, frag type str: {}, ctx type: {}".format( _java_app_type, _frag_param_str, _java_inner_context_type ) ) parse_user_app_process.wait() return _java_app_type, _frag_param_str, _java_inner_context_type def java_type_to_gs_type(java_type: str): if java_type == JAVA_LONG: dataType = "int64" elif java_type == JAVA_INT: dataType = "int" elif java_type == JAVA_DOUBLE: dataType = "double" elif java_type == JAVA_FLOAT: dataType = "float" else: logger.error("Unrecognized type: {}".format(java_type)) return dataType def parse_and_check_type_params(type_params: str): type_params = type_params.strip() types = type_params.split(",") if len(types) != 4: raise Exception("Expected 4 type params in your app.") if types[0] != JAVA_LONG: logger.error("Currently we only accept int64 as oid") sys.exit(1) if types[1] != JAVA_LONG: logger.error("Currently we only accept int64_t as vid") sys.exit(1) vdataType = java_type_to_gs_type(types[2]) edataType = java_type_to_gs_type(types[3]) return vdataType, edataType def run_app( vdataType: str, edataType: str, app_type: str, directed: bool, jar_path: str, java_app_class: str, param_str, ): sess = graphscope.session(cluster_type="hosts", num_workers=1) graph = sess.g(directed=directed) graph = load_p2p_network(sess) if "simple" in app_type: graph = graph.project(vertices={"host": ['id']}, edges={"connect": ["dist"]}) app = JavaApp(full_jar_path=jar_path, java_app_class=java_app_class) exec("ctx=app(graph, {})".format(param_str)) logger.info("Successfully verify app: {}".format(java_app_class)) if __name__ == "__main__": args = parse_args() logger.info("Running app\t\t\t\t={}".format(args.app)) logger.info("Jar apth\t\t\t\t={}".format(args.jar_path)) logger.info("Test data dir\t\t\t\t={}".format(args.test_dir)) logger.info("Arguments to java context\t\t={}".format(args.arguments)) logger.info("Directed: \t\t\t\t={}".format(args.directed)) app_type, type_params, _ = parse_java_app(args.app, args.jar_path) if app_type not in POSSIBLE_APP_TYPES: logger.error("Unsupported app type:{}".format(app_type)) vdataType, edataType = parse_and_check_type_params(type_params) logger.info("vdataType: [{}], edataType: [{}]".format(vdataType, edataType)) run_app( vdataType, edataType, app_type, args.directed, args.jar_path, args.app, args.arguments, )
[ "logging.basicConfig", "logging.getLogger", "argparse.ArgumentParser", "shutil.which", "os.environ.get", "os.access", "os.environ.copy", "graphscope.session", "os.path.isfile", "graphscope.set_option", "graphscope.dataset.load_p2p_network", "sys.exit", "graphscope.JavaApp" ]
[((845, 881), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (866, 881), False, 'import graphscope\n'), ((1186, 1244), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': 'LOG_FORMAT'}), '(level=logging.INFO, format=LOG_FORMAT)\n', (1205, 1244), False, 'import logging\n'), ((1254, 1290), 'logging.getLogger', 'logging.getLogger', (['"""java-app-runner"""'], {}), "('java-app-runner')\n", (1271, 1290), False, 'import logging\n'), ((1324, 1403), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (1347, 1403), False, 'import argparse\n'), ((5501, 5556), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (5519, 5556), False, 'import graphscope\n'), ((5607, 5629), 'graphscope.dataset.load_p2p_network', 'load_p2p_network', (['sess'], {}), '(sess)\n', (5623, 5629), False, 'from graphscope.dataset import load_p2p_network\n'), ((5760, 5822), 'graphscope.JavaApp', 'JavaApp', ([], {'full_jar_path': 'jar_path', 'java_app_class': 'java_app_class'}), '(full_jar_path=jar_path, java_app_class=java_app_class)\n', (5767, 5822), False, 'from graphscope import JavaApp\n'), ((2291, 2311), 'shutil.which', 'shutil.which', (['"""java"""'], {}), "('java')\n", (2303, 2311), False, 'import shutil\n'), ((5082, 5093), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5090, 5093), False, 'import sys\n'), ((5196, 5207), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5204, 5207), False, 'import sys\n'), ((2332, 2365), 'os.environ.get', 'os.environ.get', (['"""JAVA_HOME"""', 'None'], {}), "('JAVA_HOME', None)\n", (2346, 2365), False, 'import os\n'), ((3033, 3050), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (3048, 3050), False, 'import os\n'), ((2423, 2450), 'os.environ.get', 'os.environ.get', (['"""JAVA_HOME"""'], {}), "('JAVA_HOME')\n", (2437, 2450), False, 'import os\n'), ((2482, 2514), 'os.path.isfile', 'os.path.isfile', (['_java_executable'], {}), '(_java_executable)\n', (2496, 2514), False, 'import os\n'), ((2522, 2558), 'os.access', 'os.access', (['_java_executable', 'os.X_OK'], {}), '(_java_executable, os.X_OK)\n', (2531, 2558), False, 'import os\n')]
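
The runner above is a plain argparse script, so it is usually driven from the command line. A hypothetical invocation via subprocess is sketched below; the script filename, app class, and jar path are placeholders, and only the flag names and the 'src=4,threadNum=1' argument format come from the parser definition above.

import subprocess
import sys

cmd = [
    sys.executable,
    "java_app_runner.py",                          # assumed filename for the script above
    "--app", "com.alibaba.graphscope.MyJavaApp",   # hypothetical fully-specified app class
    "--jar_path", "/tmp/my-java-app.jar",          # hypothetical packed jar
    "--arguments", "src=4,threadNum=1",            # format shown in the --arguments help text
]
subprocess.run(cmd, check=True)

One caveat visible in the parser above: --directed is declared with type=bool, so argparse converts any non-empty value (including the string "False") to True; it behaves as a presence-style flag.
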
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import pandas as pd import pytest from networkx.testing.utils import assert_graphs_equal import graphscope import graphscope.nx as nx from graphscope.framework.loader import Loader from graphscope.nx import NetworkXError from graphscope.nx.tests.classes.test_digraph import TestDiGraph as _TestDiGraph from graphscope.nx.tests.classes.test_graph import TestGraph as _TestGraph from graphscope.nx.tests.utils import almost_equal def k3_graph(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "3v.csv"), delimiter="|"), "vertex" ) if directed: graph = graph.add_edges( Loader(os.path.join(prefix, "k3_directed.csv"), delimiter="|"), "edge", ) else: graph = graph.add_edges( Loader(os.path.join(prefix, "k3_undirected.csv"), delimiter="|"), "edge", ) return graph def p3_graph(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "3v.csv"), delimiter="|"), "vertex" ) graph = graph.add_edges( Loader(os.path.join(prefix, "p3_directed.csv"), delimiter="|"), "edge", ) return graph def simple_label_graph(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices(Loader(os.path.join(prefix, "simple_v_0.csv")), "v-0") graph = graph.add_vertices(Loader(os.path.join(prefix, "simple_v_1.csv")), "v-1") graph = graph.add_edges( Loader(os.path.join(prefix, "simple_e_0.csv")), "e-0", src_label="v-0", dst_label="v-0", ) graph = graph.add_edges( Loader(os.path.join(prefix, "simple_e_1.csv")), "e-1", src_label="v-0", dst_label="v-1", ) graph = graph.add_edges( Loader(os.path.join(prefix, "simple_e_2.csv")), "e-2", src_label="v-1", dst_label="v-1", ) return graph def simple_label_multigraph(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices(Loader(os.path.join(prefix, "simple_v_0.csv")), "v-0") graph = graph.add_vertices(Loader(os.path.join(prefix, "simple_v_1.csv")), "v-1") graph = graph.add_edges( Loader(os.path.join(prefix, "simple_e_0.csv")), "e-0", src_label="v-0", dst_label="v-0", ) graph = graph.add_edges( Loader(os.path.join(prefix, "simple_e_1_multiple.csv")), "e-1", src_label="v-0", dst_label="v-1", ) graph = graph.add_edges( Loader(os.path.join(prefix, "simple_e_2.csv")), "e-2", src_label="v-1", dst_label="v-1", ) return graph def p2p_31_graph(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "p2p-31.v"), delimiter=" ", header_row=False), "vertex", ) graph = graph.add_edges( Loader(os.path.join(prefix, "p2p-31.e"), delimiter=" ", header_row=False), "edge", ) return graph @pytest.mark.usefixtures("graphscope_session") class TestGraphCopyOnWrite(_TestGraph): def setup_method(self): self.Graph = nx.Graph 
self.k3nodes = [0, 1, 2] self.k3edges = [(0, 1), (0, 2), (1, 2)] data_dir = os.path.expandvars("${GS_TEST_DIR}/networkx") self.k3 = k3_graph(data_dir, False) self.K3 = nx.Graph(self.k3, default_label="vertex") def test_update(self): # specify both edges and nodes G = self.K3.copy() G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})]) nlist = [ (0, {}), (1, {}), (2, {}), (3, {}), (4, {"size": 2}), (5, {}), (6, {}), (7, {}), ] assert sorted(G.nodes.data()) == nlist if G.is_directed(): elist = [ (0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}), (2, 0, {}), (2, 1, {}), (4, 5, {}), (6, 7, {"weight": 2}), ] else: if os.environ.get("DEPLOYMENT", None) == "standalone": elist = [ (0, 1, {}), (0, 2, {}), (1, 2, {}), (4, 5, {}), (6, 7, {"weight": 2}), ] else: # num_workers=2 elist = [ (0, 1, {}), (2, 0, {}), # N.B: diff with _TestGraph, update the order of id (2, 1, {}), (4, 5, {}), (6, 7, {"weight": 2}), ] assert sorted(G.edges.data()) == elist assert G.graph == {} # no keywords -- order is edges, nodes G = self.K3.copy() G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})]) assert sorted(G.nodes.data()) == nlist assert sorted(G.edges.data()) == elist assert G.graph == {} # update using only a graph G = self.Graph() G.graph["foo"] = "bar" G.add_node(2, data=4) G.add_edge(0, 1, weight=0.5) GG = G.copy() H = self.Graph() GG.update(H) assert_graphs_equal(G, GG) H.update(G) assert_graphs_equal(H, G) # update nodes only H = self.Graph() H.update(nodes=[3, 4]) assert H.nodes ^ {3, 4} == set() assert H.size() == 0 # update edges only H = self.Graph() H.update(edges=[(3, 4)]) if H.is_directed(): assert sorted(H.edges.data()) == [(3, 4, {})] else: assert sorted(H.edges.data()) in ([(3, 4, {})], [(4, 3, {})]) assert H.size() == 1 # No inputs -> exception with pytest.raises(nx.NetworkXError): nx.Graph().update() @pytest.mark.usefixtures("graphscope_session") class TestDiGraphCopyOnWrite(_TestDiGraph): def setup_method(self): data_dir = os.path.expandvars("${GS_TEST_DIR}/networkx") self.Graph = nx.DiGraph # build K3 self.k3edges = [(0, 1), (0, 2), (1, 2)] self.k3nodes = [0, 1, 2] self.k3 = k3_graph(data_dir, True) self.K3 = nx.DiGraph(self.k3, default_label="vertex") self.p3 = p3_graph(data_dir, True) self.P3 = nx.DiGraph(self.p3, default_label="vertex") @pytest.mark.usefixtures("graphscope_session") class TestBuiltinCopyOnWrite: def setup_method(self): data_dir = os.path.expandvars("${GS_TEST_DIR}/networkx") p2p_dir = os.path.expandvars("${GS_TEST_DIR}") self.simple = simple_label_graph(data_dir, True) self.multi_simple = simple_label_multigraph(data_dir, True) self.K3 = k3_graph(data_dir, False) self.SG = nx.DiGraph(self.simple, default_label="v-0") self.SG.pagerank = { 1: 0.03721197, 2: 0.05395735, 3: 0.04150565, 4: 0.37508082, 5: 0.20599833, 6: 0.28624589, } self.SG.auth = { 1: 0.165000, 2: 0.243018, 3: 0.078017, 4: 0.078017, 5: 0.270943, 6: 0.165000, } self.SG.hub = { 1: 0.182720, 2: 0.0, 3: 0.386437, 4: 0.248121, 5: 0.138316, 6: 0.044404, } self.SG.eigen = { 1: 3.201908045277076e-06, 2: 6.4038160905537886e-06, 3: 3.201908045277076e-06, 5: 0.40044823300165794, 4: 0.6479356498234745, 6: 0.6479356498234745, } self.SG.katz = { 1: 0.37871516522035104, 2: 0.4165866814015425, 3: 0.37871516522035104, 5: 0.42126739520601203, 4: 0.4255225997990211, 6: 0.4255225997990211, } # FIXME(acezen): p2p_31_graph loading fail in ci, open when fixed the problem. 
(fixme) # self.p2p_31 = p2p_31_graph(p2p_dir, False) # self.P2P = nx.Graph(self.p2p_31, default_label="vertex") # self.P2P.sssp = dict( # pd.read_csv( # "{}/p2p-31-sssp".format(os.path.expandvars("${GS_TEST_DIR}")), # sep=" ", # header=None, # prefix="", # ).values # ) def test_error_with_multigraph(self): with pytest.raises( NetworkXError, match="Graph is multigraph, cannot be converted to networkx graph", ): MSG = nx.DiGraph(self.multi_simple) def test_single_source_dijkstra_path_length(self): ret = nx.builtin.single_source_dijkstra_path_length( self.SG, source=1, weight="weight" ) assert ret == {1: 0.0, 2: 1.0, 3: 1.0, 4: 3.0, 5: 2.0, 6: 3.0} # p2p_ans = nx.builtin.single_source_dijkstra_path_length( # self.P2P, source=6, weight="f2" # ) # assert replace_with_inf(p2p_ans) == self.P2P.sssp def test_wcc(self): ret = nx.builtin.weakly_connected_components(self.SG) assert ret == {1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0, 6: 0.0} def test_pagerank(self): p = nx.builtin.pagerank(self.SG, alpha=0.9, tol=1.0e-08) for n in p: assert almost_equal(p[n], self.SG.pagerank[n], places=4) def test_hits(self): h, a = nx.builtin.hits(self.SG, tol=1.0e-08) for n in h: assert almost_equal(h[n], self.SG.hub[n], places=4) assert almost_equal(a[n], self.SG.auth[n], places=4) def test_degree_centrality(self): ret = nx.builtin.degree_centrality(self.SG) assert ret == { 1: 0.6, 2: 0.4, 3: 0.8, 5: 0.8, 4: 0.8, 6: 0.6, } def test_eigenvector_centrality(self): ret = nx.builtin.eigenvector_centrality(self.SG) for n in ret: assert almost_equal(ret[n], self.SG.eigen[n], places=12) def test_katz_centrality(self): ret = nx.builtin.katz_centrality(self.SG) for n in ret: assert almost_equal(ret[n], self.SG.katz[n], places=12) def test_has_path(self): assert nx.builtin.has_path(self.SG, source=1, target=6) def test_average_shortest_path_length(self): assert nx.builtin.average_shortest_path_length(self.SG) == 0.8 def test_bfs_edges(self): ret = nx.builtin.bfs_edges(self.SG, 1, depth_limit=10) assert sorted(ret) == [[1, 2], [1, 3], [3, 5], [5, 4], [5, 6]] def bfs_tree(self): ret = nx.builtin.bfs_tree(self.SG, 1, depth_limit=10) assert sorted(ret) == [1, 2, 3, 4, 5, 6] def test_k_core(self): ret = nx.builtin.k_core(self.SG, k=1) assert ret is not None def test_clustering(self): ret = nx.builtin.clustering(self.SG) assert ret == {1: 0.5, 2: 1.0, 3: 0.2, 5: 0.4, 4: 0.5, 6: 1.0} def test_triangles(self): ret = nx.builtin.triangles(self.K3) assert ret == {2: 1, 0: 1, 1: 1} def test_average_clustering(self): ret = nx.builtin.average_clustering(self.SG) assert almost_equal(ret, 0.6, places=4) def test_degree_assortativity_coefficient(self): ret = nx.builtin.degree_assortativity_coefficient(self.SG) assert almost_equal(ret, -0.25000000000000033, places=12) def test_node_boundary(self): ret = nx.builtin.node_boundary(self.SG, [1, 2]) assert ret == {3} def test_edge_boundary(self): ret = nx.builtin.edge_boundary(self.SG, [1, 2]) assert list(ret) == [(1, 3)] def test_attribute_assortativity_coefficient(self): ret = nx.builtin.attribute_assortativity_coefficient(self.SG, attribute="attr") assert almost_equal(ret, -0.17647058823529418, places=12) def test_numeric_assortativity_coefficient(self): ret = nx.builtin.numeric_assortativity_coefficient(self.SG, attribute="attr") assert almost_equal(ret, 0.5383819020581653, places=12)
[ "graphscope.nx.builtin.edge_boundary", "graphscope.nx.builtin.bfs_tree", "graphscope.nx.builtin.k_core", "graphscope.nx.builtin.average_shortest_path_length", "graphscope.nx.builtin.numeric_assortativity_coefficient", "graphscope.nx.builtin.bfs_edges", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.builtin.clustering", "graphscope.nx.builtin.degree_assortativity_coefficient", "graphscope.nx.builtin.eigenvector_centrality", "graphscope.nx.builtin.average_clustering", "graphscope.nx.builtin.attribute_assortativity_coefficient", "graphscope.nx.builtin.hits", "pytest.mark.usefixtures", "graphscope.nx.builtin.weakly_connected_components", "networkx.testing.utils.assert_graphs_equal", "graphscope.nx.builtin.triangles", "graphscope.g", "graphscope.nx.Graph", "graphscope.nx.DiGraph", "pytest.raises", "graphscope.nx.builtin.degree_centrality", "graphscope.nx.tests.utils.almost_equal", "graphscope.nx.builtin.node_boundary", "os.path.expandvars", "os.path.join", "os.environ.get", "graphscope.nx.builtin.katz_centrality", "graphscope.nx.builtin.has_path", "graphscope.nx.builtin.pagerank" ]
[((3926, 3971), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (3949, 3971), False, 'import pytest\n'), ((6873, 6918), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (6896, 6918), False, 'import pytest\n'), ((7402, 7447), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (7425, 7447), False, 'import pytest\n'), ((1153, 1204), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (1165, 1204), False, 'import graphscope\n'), ((1685, 1736), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (1697, 1736), False, 'import graphscope\n'), ((2043, 2094), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (2055, 2094), False, 'import graphscope\n'), ((2813, 2864), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (2825, 2864), False, 'import graphscope\n'), ((3581, 3632), 'graphscope.g', 'graphscope.g', ([], {'directed': 'directed', 'generate_eid': '(False)'}), '(directed=directed, generate_eid=False)\n', (3593, 3632), False, 'import graphscope\n'), ((4170, 4215), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/networkx"""'], {}), "('${GS_TEST_DIR}/networkx')\n", (4188, 4215), False, 'import os\n'), ((4278, 4319), 'graphscope.nx.Graph', 'nx.Graph', (['self.k3'], {'default_label': '"""vertex"""'}), "(self.k3, default_label='vertex')\n", (4286, 4319), True, 'import graphscope.nx as nx\n'), ((6232, 6258), 'networkx.testing.utils.assert_graphs_equal', 'assert_graphs_equal', (['G', 'GG'], {}), '(G, GG)\n', (6251, 6258), False, 'from networkx.testing.utils import assert_graphs_equal\n'), ((6287, 6312), 'networkx.testing.utils.assert_graphs_equal', 'assert_graphs_equal', (['H', 'G'], {}), '(H, G)\n', (6306, 6312), False, 'from networkx.testing.utils import assert_graphs_equal\n'), ((7010, 7055), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/networkx"""'], {}), "('${GS_TEST_DIR}/networkx')\n", (7028, 7055), False, 'import os\n'), ((7249, 7292), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['self.k3'], {'default_label': '"""vertex"""'}), "(self.k3, default_label='vertex')\n", (7259, 7292), True, 'import graphscope.nx as nx\n'), ((7355, 7398), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['self.p3'], {'default_label': '"""vertex"""'}), "(self.p3, default_label='vertex')\n", (7365, 7398), True, 'import graphscope.nx as nx\n'), ((7525, 7570), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/networkx"""'], {}), "('${GS_TEST_DIR}/networkx')\n", (7543, 7570), False, 'import os\n'), ((7589, 7625), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (7607, 7625), False, 'import os\n'), ((7814, 7858), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['self.simple'], {'default_label': '"""v-0"""'}), "(self.simple, default_label='v-0')\n", (7824, 7858), True, 'import graphscope.nx as nx\n'), ((9714, 9800), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.SG'], {'source': '(1)', 'weight': '"""weight"""'}), "(self.SG, source=1, 
weight=\n 'weight')\n", (9759, 9800), True, 'import graphscope.nx as nx\n'), ((10113, 10160), 'graphscope.nx.builtin.weakly_connected_components', 'nx.builtin.weakly_connected_components', (['self.SG'], {}), '(self.SG)\n', (10151, 10160), True, 'import graphscope.nx as nx\n'), ((10274, 10324), 'graphscope.nx.builtin.pagerank', 'nx.builtin.pagerank', (['self.SG'], {'alpha': '(0.9)', 'tol': '(1e-08)'}), '(self.SG, alpha=0.9, tol=1e-08)\n', (10293, 10324), True, 'import graphscope.nx as nx\n'), ((10457, 10492), 'graphscope.nx.builtin.hits', 'nx.builtin.hits', (['self.SG'], {'tol': '(1e-08)'}), '(self.SG, tol=1e-08)\n', (10472, 10492), True, 'import graphscope.nx as nx\n'), ((10697, 10734), 'graphscope.nx.builtin.degree_centrality', 'nx.builtin.degree_centrality', (['self.SG'], {}), '(self.SG)\n', (10725, 10734), True, 'import graphscope.nx as nx\n'), ((10947, 10989), 'graphscope.nx.builtin.eigenvector_centrality', 'nx.builtin.eigenvector_centrality', (['self.SG'], {}), '(self.SG)\n', (10980, 10989), True, 'import graphscope.nx as nx\n'), ((11132, 11167), 'graphscope.nx.builtin.katz_centrality', 'nx.builtin.katz_centrality', (['self.SG'], {}), '(self.SG)\n', (11158, 11167), True, 'import graphscope.nx as nx\n'), ((11303, 11351), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.SG'], {'source': '(1)', 'target': '(6)'}), '(self.SG, source=1, target=6)\n', (11322, 11351), True, 'import graphscope.nx as nx\n'), ((11518, 11566), 'graphscope.nx.builtin.bfs_edges', 'nx.builtin.bfs_edges', (['self.SG', '(1)'], {'depth_limit': '(10)'}), '(self.SG, 1, depth_limit=10)\n', (11538, 11566), True, 'import graphscope.nx as nx\n'), ((11677, 11724), 'graphscope.nx.builtin.bfs_tree', 'nx.builtin.bfs_tree', (['self.SG', '(1)'], {'depth_limit': '(10)'}), '(self.SG, 1, depth_limit=10)\n', (11696, 11724), True, 'import graphscope.nx as nx\n'), ((11816, 11847), 'graphscope.nx.builtin.k_core', 'nx.builtin.k_core', (['self.SG'], {'k': '(1)'}), '(self.SG, k=1)\n', (11833, 11847), True, 'import graphscope.nx as nx\n'), ((11925, 11955), 'graphscope.nx.builtin.clustering', 'nx.builtin.clustering', (['self.SG'], {}), '(self.SG)\n', (11946, 11955), True, 'import graphscope.nx as nx\n'), ((12072, 12101), 'graphscope.nx.builtin.triangles', 'nx.builtin.triangles', (['self.K3'], {}), '(self.K3)\n', (12092, 12101), True, 'import graphscope.nx as nx\n'), ((12197, 12235), 'graphscope.nx.builtin.average_clustering', 'nx.builtin.average_clustering', (['self.SG'], {}), '(self.SG)\n', (12226, 12235), True, 'import graphscope.nx as nx\n'), ((12251, 12283), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ret', '(0.6)'], {'places': '(4)'}), '(ret, 0.6, places=4)\n', (12263, 12283), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((12352, 12404), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.SG'], {}), '(self.SG)\n', (12395, 12404), True, 'import graphscope.nx as nx\n'), ((12420, 12470), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ret', '(-0.25000000000000033)'], {'places': '(12)'}), '(ret, -0.25000000000000033, places=12)\n', (12432, 12470), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((12520, 12561), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['self.SG', '[1, 2]'], {}), '(self.SG, [1, 2])\n', (12544, 12561), True, 'import graphscope.nx as nx\n'), ((12637, 12678), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['self.SG', '[1, 2]'], {}), 
'(self.SG, [1, 2])\n', (12661, 12678), True, 'import graphscope.nx as nx\n'), ((12787, 12860), 'graphscope.nx.builtin.attribute_assortativity_coefficient', 'nx.builtin.attribute_assortativity_coefficient', (['self.SG'], {'attribute': '"""attr"""'}), "(self.SG, attribute='attr')\n", (12833, 12860), True, 'import graphscope.nx as nx\n'), ((12876, 12926), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ret', '(-0.17647058823529418)'], {'places': '(12)'}), '(ret, -0.17647058823529418, places=12)\n', (12888, 12926), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((12996, 13067), 'graphscope.nx.builtin.numeric_assortativity_coefficient', 'nx.builtin.numeric_assortativity_coefficient', (['self.SG'], {'attribute': '"""attr"""'}), "(self.SG, attribute='attr')\n", (13040, 13067), True, 'import graphscope.nx as nx\n'), ((13083, 13131), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ret', '(0.5383819020581653)'], {'places': '(12)'}), '(ret, 0.5383819020581653, places=12)\n', (13095, 13131), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((1252, 1282), 'os.path.join', 'os.path.join', (['prefix', '"""3v.csv"""'], {}), "(prefix, '3v.csv')\n", (1264, 1282), False, 'import os\n'), ((1784, 1814), 'os.path.join', 'os.path.join', (['prefix', '"""3v.csv"""'], {}), "(prefix, '3v.csv')\n", (1796, 1814), False, 'import os\n'), ((1891, 1930), 'os.path.join', 'os.path.join', (['prefix', '"""p3_directed.csv"""'], {}), "(prefix, 'p3_directed.csv')\n", (1903, 1930), False, 'import os\n'), ((2133, 2171), 'os.path.join', 'os.path.join', (['prefix', '"""simple_v_0.csv"""'], {}), "(prefix, 'simple_v_0.csv')\n", (2145, 2171), False, 'import os\n'), ((2219, 2257), 'os.path.join', 'os.path.join', (['prefix', '"""simple_v_1.csv"""'], {}), "(prefix, 'simple_v_1.csv')\n", (2231, 2257), False, 'import os\n'), ((2311, 2349), 'os.path.join', 'os.path.join', (['prefix', '"""simple_e_0.csv"""'], {}), "(prefix, 'simple_e_0.csv')\n", (2323, 2349), False, 'import os\n'), ((2467, 2505), 'os.path.join', 'os.path.join', (['prefix', '"""simple_e_1.csv"""'], {}), "(prefix, 'simple_e_1.csv')\n", (2479, 2505), False, 'import os\n'), ((2623, 2661), 'os.path.join', 'os.path.join', (['prefix', '"""simple_e_2.csv"""'], {}), "(prefix, 'simple_e_2.csv')\n", (2635, 2661), False, 'import os\n'), ((2903, 2941), 'os.path.join', 'os.path.join', (['prefix', '"""simple_v_0.csv"""'], {}), "(prefix, 'simple_v_0.csv')\n", (2915, 2941), False, 'import os\n'), ((2989, 3027), 'os.path.join', 'os.path.join', (['prefix', '"""simple_v_1.csv"""'], {}), "(prefix, 'simple_v_1.csv')\n", (3001, 3027), False, 'import os\n'), ((3081, 3119), 'os.path.join', 'os.path.join', (['prefix', '"""simple_e_0.csv"""'], {}), "(prefix, 'simple_e_0.csv')\n", (3093, 3119), False, 'import os\n'), ((3237, 3284), 'os.path.join', 'os.path.join', (['prefix', '"""simple_e_1_multiple.csv"""'], {}), "(prefix, 'simple_e_1_multiple.csv')\n", (3249, 3284), False, 'import os\n'), ((3402, 3440), 'os.path.join', 'os.path.join', (['prefix', '"""simple_e_2.csv"""'], {}), "(prefix, 'simple_e_2.csv')\n", (3414, 3440), False, 'import os\n'), ((3680, 3712), 'os.path.join', 'os.path.join', (['prefix', '"""p2p-31.v"""'], {}), "(prefix, 'p2p-31.v')\n", (3692, 3712), False, 'import os\n'), ((3816, 3848), 'os.path.join', 'os.path.join', (['prefix', '"""p2p-31.e"""'], {}), "(prefix, 'p2p-31.e')\n", (3828, 3848), False, 'import os\n'), ((6805, 6836), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError'], {}), '(nx.NetworkXError)\n', (6818, 6836), 
False, 'import pytest\n'), ((9463, 9564), 'pytest.raises', 'pytest.raises', (['NetworkXError'], {'match': '"""Graph is multigraph, cannot be converted to networkx graph"""'}), "(NetworkXError, match=\n 'Graph is multigraph, cannot be converted to networkx graph')\n", (9476, 9564), False, 'import pytest\n'), ((9614, 9643), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['self.multi_simple'], {}), '(self.multi_simple)\n', (9624, 9643), True, 'import graphscope.nx as nx\n'), ((10366, 10415), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['p[n]', 'self.SG.pagerank[n]'], {'places': '(4)'}), '(p[n], self.SG.pagerank[n], places=4)\n', (10378, 10415), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((10534, 10578), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['h[n]', 'self.SG.hub[n]'], {'places': '(4)'}), '(h[n], self.SG.hub[n], places=4)\n', (10546, 10578), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((10598, 10643), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['a[n]', 'self.SG.auth[n]'], {'places': '(4)'}), '(a[n], self.SG.auth[n], places=4)\n', (10610, 10643), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((11031, 11080), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ret[n]', 'self.SG.eigen[n]'], {'places': '(12)'}), '(ret[n], self.SG.eigen[n], places=12)\n', (11043, 11080), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((11209, 11257), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ret[n]', 'self.SG.katz[n]'], {'places': '(12)'}), '(ret[n], self.SG.katz[n], places=12)\n', (11221, 11257), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((11417, 11465), 'graphscope.nx.builtin.average_shortest_path_length', 'nx.builtin.average_shortest_path_length', (['self.SG'], {}), '(self.SG)\n', (11456, 11465), True, 'import graphscope.nx as nx\n'), ((1384, 1423), 'os.path.join', 'os.path.join', (['prefix', '"""k3_directed.csv"""'], {}), "(prefix, 'k3_directed.csv')\n", (1396, 1423), False, 'import os\n'), ((1533, 1574), 'os.path.join', 'os.path.join', (['prefix', '"""k3_undirected.csv"""'], {}), "(prefix, 'k3_undirected.csv')\n", (1545, 1574), False, 'import os\n'), ((5079, 5113), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (5093, 5113), False, 'import os\n'), ((6850, 6860), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (6858, 6860), True, 'import graphscope.nx as nx\n')]
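
The fixtures in the row above follow one recipe: build a GraphScope property graph, then wrap it with graphscope.nx and a default label so the NetworkX-style API runs on top of it (the copy-on-write behaviour these tests exercise). A minimal sketch of that recipe, assuming a reachable GraphScope session and placeholder CSV paths with the same '|'-delimited layout as the test data.

import graphscope
import graphscope.nx as nx
from graphscope.framework.loader import Loader

# Property graph built the same way as k3_graph() above, with placeholder paths.
gs_graph = graphscope.g(directed=False, generate_eid=False)
gs_graph = gs_graph.add_vertices(Loader("/path/to/3v.csv", delimiter="|"), "vertex")
gs_graph = gs_graph.add_edges(Loader("/path/to/k3_undirected.csv", delimiter="|"), "edge")

# Hand the property graph to graphscope.nx; NetworkX-style algorithms then run
# against the shared store until a mutation forces a copy.
G = nx.Graph(gs_graph, default_label="vertex")
print(nx.builtin.degree_centrality(G))
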
import networkx.algorithms.shortest_paths.tests.test_dense
import networkx.algorithms.shortest_paths.tests.test_dense_numpy
import networkx.algorithms.shortest_paths.tests.test_generic
import networkx.algorithms.shortest_paths.tests.test_unweighted
import networkx.algorithms.shortest_paths.tests.test_weighted
import pytest
from networkx.algorithms.shortest_paths.tests.test_astar import TestAStar as _TestAStar

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(_TestAStar)
class TestAStar:
    @pytest.mark.skip(reason="not support class object as node")
    def test_unorderable_nodes(self):
        pass


import_as_graphscope_nx(
    networkx.algorithms.shortest_paths.tests.test_dense,
    decorators=pytest.mark.usefixtures("graphscope_session"))

import_as_graphscope_nx(
    networkx.algorithms.shortest_paths.tests.test_dense_numpy,
    decorators=pytest.mark.usefixtures("graphscope_session"))

import_as_graphscope_nx(
    networkx.algorithms.shortest_paths.tests.test_generic,
    decorators=pytest.mark.usefixtures("graphscope_session"))

import_as_graphscope_nx(
    networkx.algorithms.shortest_paths.tests.test_unweighted,
    decorators=pytest.mark.usefixtures("graphscope_session"))

import_as_graphscope_nx(
    networkx.algorithms.shortest_paths.tests.test_weighted,
    decorators=pytest.mark.usefixtures("graphscope_session"))


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestAverageShortestPathLength)
class TestAverageShortestPathLength:
    @pytest.mark.skip(reason="builtin app would not raise Error during compute")
    def test_disconnected(self):
        pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((546, 591), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (569, 591), False, 'import pytest\n'), ((593, 631), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_TestAStar'], {}), '(_TestAStar)\n', (619, 631), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1507, 1552), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1530, 1552), False, 'import pytest\n'), ((1554, 1611), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestAverageShortestPathLength'], {}), '(TestAverageShortestPathLength)\n', (1580, 1611), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((656, 715), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support class object as node"""'}), "(reason='not support class object as node')\n", (672, 715), False, 'import pytest\n'), ((1656, 1731), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""builtin app would not raise Error during compute"""'}), "(reason='builtin app would not raise Error during compute')\n", (1672, 1731), False, 'import pytest\n'), ((861, 906), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (884, 906), False, 'import pytest\n'), ((1012, 1057), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1035, 1057), False, 'import pytest\n'), ((1159, 1204), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1182, 1204), False, 'import pytest\n'), ((1309, 1354), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1332, 1354), False, 'import pytest\n'), ((1457, 1502), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1480, 1502), False, 'import pytest\n')]
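
The forwarded suites above cover the shortest-path surface; a direct call against a small graphscope.nx graph looks like the sketch below. It assumes a running graphscope_session (as the fixture provides) and uses the builtin variant that other rows in this dump call.

import graphscope.nx as nx

G = nx.Graph()
G.add_edge(0, 1, weight=1)
G.add_edge(1, 2, weight=2)

# Weighted single-source distances from node 0; for this toy graph the result
# should be {0: 0.0, 1: 1.0, 2: 3.0}.
print(nx.builtin.single_source_dijkstra_path_length(G, source=0, weight="weight"))
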
import networkx.algorithms.tests.test_dominance import pytest from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx(networkx.algorithms.tests.test_dominance, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.tests.test_dominance import TestDominanceFrontiers from networkx.algorithms.tests.test_dominance import TestImmediateDominators @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestDominanceFrontiers) class TestDominanceFrontiers: def test_exceptions(self): G = nx.Graph() G.add_node(0) pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) G = nx.DiGraph([[0, 0]]) pytest.raises(nx.NetworkXError, nx.dominance_frontiers, G, 1) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestImmediateDominators) class TestImmediateDominators: def test_exceptions(self): G = nx.Graph() G.add_node(0) pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) G = nx.DiGraph([[0, 0]]) pytest.raises(nx.NetworkXError, nx.immediate_dominators, G, 1)
[ "graphscope.experimental.nx.utils.compat.with_graphscope_nx_context", "pytest.raises", "pytest.mark.usefixtures" ]
[((524, 569), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (547, 569), False, 'import pytest\n'), ((571, 621), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestDominanceFrontiers'], {}), '(TestDominanceFrontiers)\n', (597, 621), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((913, 958), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (936, 958), False, 'import pytest\n'), ((960, 1011), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestImmediateDominators'], {}), '(TestImmediateDominators)\n', (986, 1011), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((320, 365), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (343, 365), False, 'import pytest\n'), ((736, 806), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'nx.dominance_frontiers', 'G', '(0)'], {}), '(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0)\n', (749, 806), False, 'import pytest\n'), ((848, 909), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.dominance_frontiers', 'G', '(1)'], {}), '(nx.NetworkXError, nx.dominance_frontiers, G, 1)\n', (861, 909), False, 'import pytest\n'), ((1127, 1198), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented', 'nx.immediate_dominators', 'G', '(0)'], {}), '(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0)\n', (1140, 1198), False, 'import pytest\n'), ((1240, 1302), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.immediate_dominators', 'G', '(1)'], {}), '(nx.NetworkXError, nx.immediate_dominators, G, 1)\n', (1253, 1302), False, 'import pytest\n')]
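
The dominance row above forwards the upstream tests through the experimental compat layer; calling the same helpers directly reads like plain NetworkX. A hedged sketch, written against the non-experimental graphscope.nx namespace used by the other rows and assuming it forwards these two functions as the forwarded tests imply.

import graphscope.nx as nx

# A tiny control-flow-like digraph rooted at node 0.
G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])

print(nx.immediate_dominators(G, 0))  # immediate dominator of each reachable node
print(nx.dominance_frontiers(G, 0))   # dominance frontier per node
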
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import base64 import collections import json try: from graphlearn import Graph as GLGraph except ImportError: GLGraph = object from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.errors import check_argument class Graph(GLGraph): def __init__(self, handle, config=None, object_id=None, graphscope_session=None): """Initialize a graph for the learning engine using a handle.""" handle = self.decode_arg(handle) config = self.decode_arg(config) if config is None: if "config" in handle: config = handle["config"] if config is None: config = collections.defaultdict(lambda: dict) if object_id is None: object_id = handle["vineyard_id"] self.handle = handle self.config = config self.object_id = object_id self.closed = False self.graphscope_session = graphscope_session super(Graph, self).__init__() self.vineyard(handle, config["nodes"], config["edges"]) for label, node_attr in config["node_attributes"].items(): n_ints, n_floats, n_strings = ( node_attr[1][0], node_attr[1][1], node_attr[1][2], ) self.node_attributes(label, node_attr[0], n_ints, n_floats, n_strings) for label, edge_attr in config["edge_attributes"].items(): n_ints, n_floats, n_strings = ( edge_attr[1][0], edge_attr[1][1], edge_attr[1][2], ) self.edge_attributes(label, edge_attr[0], n_ints, n_floats, n_strings) for node_view_label, node_label, nsplit, split_range in config["gen_labels"]: self.node_view( node_view_label, node_label, nsplit=nsplit, split_range=split_range ) self.init_vineyard(worker_index=0, worker_count=1) def decode_arg(self, arg): if arg is None or isinstance(arg, dict): return arg return json.loads(base64.b64decode(arg.encode("utf-8")).decode("utf-8")) def close(self): if not self.closed: self.closed = True if self.graphscope_session is not None: self.graphscope_session.close_learning_instance(self) super(Graph, self).close() @staticmethod def preprocess_args(handle, nodes, edges, gen_labels): # noqa: C901 handle = json.loads(base64.b64decode(handle).decode("utf-8", errors="ignore")) node_names = [] node_attributes = {} edge_names = [] edge_attributes = {} def selected_property_schema(attr_types, attributes): prop_counts = collections.defaultdict(lambda: 0) for attr in attributes: prop_counts[attr_types[attr]] += 1 return [prop_counts["i"], prop_counts["f"], prop_counts["s"]] if nodes is not None: for node in nodes: if isinstance(node, str): if node in node_names: raise InvalidArgumentError("Duplicate node type: %s" % node) node_names.append(node) elif isinstance(node, tuple): if node[0] in node_names: raise InvalidArgumentError("Duplicate node type: %s" % node[0]) node_names.append(node[0]) attr_types = handle["node_attribute_types"][node[0]] attr_schema = selected_property_schema(attr_types, node[1]) node_attributes[node[0]] = (node[1], attr_schema) else: raise InvalidArgumentError( "The node parameter is in bad format: %s" % node ) 
else: for node in handle["node_schema"]: node_names.append(node.split(":")[0]) if edges is not None: for edge in edges: if isinstance(edge, str): if len(node_names) > 1: raise InvalidArgumentError( "Cannot inference edge type when multiple kinds of nodes exists" ) edge_names.append((node_names[0], edge, node_names[0])) elif ( isinstance(edge, tuple) and isinstance(edge[0], str) and isinstance(edge[1], str) ): edge_names.append(edge) elif ( isinstance(edge, tuple) and isinstance(edge[0], str) and isinstance(edge[1], list) ): if len(node_names) > 1: raise InvalidArgumentError( "Cannot inference edge type when multiple kinds of nodes exists" ) edge_names.append((node_names[0], edge[0], node_names[0])) attr_types = handle["edge_attribute_types"][edge[0]] attr_schema = selected_property_schema(attr_types, edge[1]) edge_attributes[edge[0]] = (edge[1], attr_schema) elif ( isinstance(edge, tuple) and isinstance(edge[0], (list, tuple)) and isinstance(edge[1], list) ): edge_names.append(edge[0]) attr_types = handle["edge_attribute_types"][edge[0][1]] attr_schema = selected_property_schema(attr_types, edge[1]) edge_attributes[edge[0][1]] = (edge[1], attr_schema) else: raise InvalidArgumentError( "The edge parameter is in bad format: %s" % edge ) split_groups = collections.defaultdict(list) if gen_labels is not None: for label in gen_labels: if len(label) == 3 or len(label) == 4: split_groups[label[1]].append(label) else: raise InvalidArgumentError( "Bad gen_labels arguments: %s" % gen_labels ) split_labels = [] for label, group in split_groups.items(): lengths = [len(split) for split in group] check_argument( lengths[:-1] == lengths[1:], "Invalid gen labels: %s" % group ) if len(group[0]) == 3: length_sum = sum(split[2] for split in group) s, ss = 0, [] for split in group: ss.append((s, s + split[2])) s += split[2] group = [ (split[0], split[1], length_sum, s) for split, s in zip(group, ss) ] for split in group: split_labels.append(split) return { "nodes": node_names if node_names else None, "edges": edge_names if edge_names else None, "node_attributes": node_attributes, "edge_attributes": edge_attributes, "gen_labels": split_labels, } def get_handle(self, worker_count=1): """Return a base64-encoded handle for distributed training.""" handle_copy = self.handle.copy() handle_copy["client_count"] = worker_count return base64.b64encode(json.dumps(handle_copy).encode("utf-8")).decode("utf-8") def V(self, t, feed=None): """Entry of Gremlin-like query. Start from node. Args: t (string): The type of node which is the entry of query or the type of edge when node is from edge source or dst. feed (None| numpy.ndarray | types.GeneratorType | `Nodes`): When `feed` is not `None`, the `type` should be a node type, which means query the attributes of the specified node ids. None: Default. Sample nodes with the following .shuffle and .batch API. numpy.ndarray: Any shape of ids. Get nodes of the given ids and node_type. types.Generator: A generator of numpy.ndarray. Get nodes of generated ids and given node_type. `Nodes`: A `Nodes` object. Return: A 'Query' object. Example: .. code:: python >>> import numpy as np >>> g.V("user").shuffle().batch(64) >>> g.V("user", feed=np.array([1, 2, 3])) >>> def gen(): >>> while True: >>> yield np.array([1, 2, 3]) >>> gen = gen() >>> g.V("user", feed=gen) """ return super(Graph, self).V(t, feed) def E(self, edge_type, feed=None, reverse=False): """Entry of Gremlin-like query. Start from edge. Args: edge_type (string): The type of edge which is the entry of query. 
            feed (None| (np.ndarray, np.ndarray) | types.GeneratorType | `Edges`):
              None: Default. Sample edges with the following .shuffle and .batch API.
              (np.ndarray, np.ndarray): src_ids, dst_ids. Get edges of the given
                (src_ids, dst_ids) and given edge_type. src_ids and dst_ids must be
                the same shape, dtype is int.
              types.Generator: A generator of (numpy.ndarray, numpy.ndarray). Get
                edges of generated (src_ids, dst_ids) and given edge_type.
              `Edges`: An `Edges` object.

        Return:
            A 'Query' object.

        Example:

        .. code:: python

            >>> import numpy as np
            >>> g.E("buy").shuffle().batch(64)
            >>> g.E("buy", feed=(np.array([1, 2, 3]), np.array([4, 5, 6])))
            >>> def gen():
            >>>   while True:
            >>>     yield (np.array([1, 2, 3]), np.array([4, 5, 6]))
            >>> gen = gen()
            >>> g.E("buy", feed=gen)
        """
        return super(Graph, self).E(edge_type, feed, reverse)
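A minimal, self-contained sketch of the handle round trip performed by decode_arg and get_handle in the class above: the handle dict is JSON-serialized, base64-encoded for transport, and decoded back on the other side. The concrete keys and values below are illustrative only, not a real engine handle.

import base64
import json

# Toy handle: real handles are produced by the GraphScope engine and carry
# many more fields (node/edge schema, attribute types, vineyard id, ...).
handle = {"vineyard_id": 12345678, "client_count": 1}

# Encoding mirrors get_handle(): dict -> JSON -> utf-8 bytes -> base64 -> str.
encoded = base64.b64encode(json.dumps(handle).encode("utf-8")).decode("utf-8")

# Decoding mirrors decode_arg(): base64 str -> bytes -> utf-8 str -> dict.
decoded = json.loads(base64.b64decode(encoded.encode("utf-8")).decode("utf-8"))

assert decoded == handle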
[ "graphscope.framework.errors.InvalidArgumentError", "json.dumps", "base64.b64decode", "collections.defaultdict", "graphscope.framework.errors.check_argument" ]
[((6606, 6635), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (6629, 6635), False, 'import collections\n'), ((1338, 1376), 'collections.defaultdict', 'collections.defaultdict', (['(lambda : dict)'], {}), '(lambda : dict)\n', (1361, 1376), False, 'import collections\n'), ((3419, 3454), 'collections.defaultdict', 'collections.defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (3442, 3454), False, 'import collections\n'), ((7123, 7200), 'graphscope.framework.errors.check_argument', 'check_argument', (['(lengths[:-1] == lengths[1:])', "('Invalid gen labels: %s' % group)"], {}), "(lengths[:-1] == lengths[1:], 'Invalid gen labels: %s' % group)\n", (7137, 7200), False, 'from graphscope.framework.errors import check_argument\n'), ((3165, 3189), 'base64.b64decode', 'base64.b64decode', (['handle'], {}), '(handle)\n', (3181, 3189), False, 'import base64\n'), ((6868, 6933), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('Bad gen_labels arguments: %s' % gen_labels)"], {}), "('Bad gen_labels arguments: %s' % gen_labels)\n", (6888, 6933), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((3792, 3846), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('Duplicate node type: %s' % node)"], {}), "('Duplicate node type: %s' % node)\n", (3812, 3846), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((4389, 4459), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('The node parameter is in bad format: %s' % node)"], {}), "('The node parameter is in bad format: %s' % node)\n", (4409, 4459), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((4799, 4890), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Cannot inference edge type when multiple kinds of nodes exists"""'], {}), "(\n 'Cannot inference edge type when multiple kinds of nodes exists')\n", (4819, 4890), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((4013, 4070), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('Duplicate node type: %s' % node[0])"], {}), "('Duplicate node type: %s' % node[0])\n", (4033, 4070), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((8199, 8222), 'json.dumps', 'json.dumps', (['handle_copy'], {}), '(handle_copy)\n', (8209, 8222), False, 'import json\n'), ((5503, 5594), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (['"""Cannot inference edge type when multiple kinds of nodes exists"""'], {}), "(\n 'Cannot inference edge type when multiple kinds of nodes exists')\n", (5523, 5594), False, 'from graphscope.framework.errors import InvalidArgumentError\n'), ((6465, 6535), 'graphscope.framework.errors.InvalidArgumentError', 'InvalidArgumentError', (["('The edge parameter is in bad format: %s' % edge)"], {}), "('The edge parameter is in bad format: %s' % edge)\n", (6485, 6535), False, 'from graphscope.framework.errors import InvalidArgumentError\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import os from pathlib import Path import numpy as np import pandas as pd import pytest from graphscope import JavaApp from graphscope.framework.app import load_app @pytest.fixture(scope="module") def not_exist_jar(): path = os.path.join("not_exist_dir", "not_exist.jar") return path @pytest.fixture(scope="module") def not_jar_file(): return os.path.expandvars("${GS_TEST_DIR}/p2p-31.e") @pytest.fixture(scope="module") def a_gar_file(): return os.path.expandvars("${GS_TEST_DIR}/gars/sssp_pie.gar") @pytest.fixture(scope="module") def empty_jar(): return os.path.expandvars("${GS_TEST_DIR}/jars/empty.jar") @pytest.fixture(scope="module") def demo_jar(): return os.path.expandvars("${USER_JAR_PATH}") @pytest.fixture(scope="module") def projected_graph_sssp_class(): return "com.alibaba.graphscope.example.sssp.SSSP" @pytest.fixture(scope="module") def non_exist_java_class(): return "com.alibaba.graphscope.example.non.existing.java.class" @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_non_existing_jar( not_exist_jar, projected_graph_sssp_class, non_exist_java_class ): with pytest.raises(FileNotFoundError): sssp = JavaApp(not_exist_jar, projected_graph_sssp_class) with pytest.raises(FileNotFoundError): sssp = JavaApp(not_exist_jar, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_not_a_jar(not_jar_file, projected_graph_sssp_class, non_exist_java_class): with pytest.raises(KeyError): sssp = JavaApp(not_jar_file, projected_graph_sssp_class) with pytest.raises(KeyError): sssp = JavaApp(not_jar_file, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_gar_file(a_gar_file, projected_graph_sssp_class, non_exist_java_class): with pytest.raises(KeyError): sssp = JavaApp(a_gar_file, projected_graph_sssp_class) with pytest.raises(KeyError): sssp = JavaApp(a_gar_file, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_empty_jar(empty_jar, projected_graph_sssp_class, non_exist_java_class): with pytest.raises(KeyError): sssp = JavaApp(empty_jar, projected_graph_sssp_class) with pytest.raises(KeyError): sssp = JavaApp(empty_jar, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_correct_jar(projected_graph_sssp_class, demo_jar): sssp = JavaApp(demo_jar, projected_graph_sssp_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_sssp_property_vertex_data( demo_jar, graphscope_session, p2p_project_directed_graph, 
projected_graph_sssp_class, ): sssp = JavaApp(full_jar_path=demo_jar, java_app_class=projected_graph_sssp_class) sssp(p2p_project_directed_graph, src=6, threadNum=1) def projected_p2p_graph_loaded_by_giraph( graphscope_session, demo_jar, vformat, eformat ): graphscope_session.add_lib(demo_jar) graph = graphscope_session.load_from( vertices=os.path.expandvars("${GS_TEST_DIR}/p2p-31.v"), vformat=vformat, edges=os.path.expandvars("${GS_TEST_DIR}/p2p-31.e"), eformat=eformat, ) graph = graph._project_to_simple(v_prop="vdata", e_prop="data") return graph # also test a giraph app @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_giraph_app( demo_jar, graphscope_session, projected_graph_sssp_class, ): graphscope_session.add_lib(demo_jar) vformat = "giraph:com.alibaba.graphscope.example.giraph.format.P2PVertexInputFormat" eformat = "giraph:com.alibaba.graphscope.example.giraph.format.P2PEdgeInputFormat" g = projected_p2p_graph_loaded_by_giraph( graphscope_session, demo_jar, vformat, eformat ) giraph_sssp = load_app(algo="giraph:com.alibaba.graphscope.example.giraph.SSSP") giraph_sssp(g, sourceId=6) del g
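Outside of pytest, the JavaApp flow exercised above boils down to the short sketch below. It assumes a GraphScope installation, a jar built and exported at ${USER_JAR_PATH}, and a projected simple graph g loaded elsewhere; the SSSP class name is the one used by the fixtures, and the final call is left commented because g is not constructed here.

import os

from graphscope import JavaApp

demo_jar = os.path.expandvars("${USER_JAR_PATH}")

# Wrap a user-provided Java algorithm; the class name comes from the fixture
# projected_graph_sssp_class above.
sssp = JavaApp(
    full_jar_path=demo_jar,
    java_app_class="com.alibaba.graphscope.example.sssp.SSSP",
)

# `g` is assumed to be a projected simple graph (see p2p_project_directed_graph);
# keyword arguments are forwarded to the Java app.
# result = sssp(g, src=6, threadNum=1)

# The Giraph-compatible variant in test_giraph_app instead goes through
# load_app with a "giraph:"-prefixed class name:
# from graphscope.framework.app import load_app
# giraph_sssp = load_app(algo="giraph:com.alibaba.graphscope.example.giraph.SSSP")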
[ "graphscope.framework.app.load_app", "os.path.expandvars", "os.path.join", "os.environ.get", "pytest.raises", "pytest.fixture", "graphscope.JavaApp" ]
[((852, 882), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (866, 882), False, 'import pytest\n'), ((981, 1011), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (995, 1011), False, 'import pytest\n'), ((1092, 1122), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1106, 1122), False, 'import pytest\n'), ((1210, 1240), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1224, 1240), False, 'import pytest\n'), ((1324, 1354), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1338, 1354), False, 'import pytest\n'), ((1424, 1454), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1438, 1454), False, 'import pytest\n'), ((1546, 1576), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1560, 1576), False, 'import pytest\n'), ((915, 961), 'os.path.join', 'os.path.join', (['"""not_exist_dir"""', '"""not_exist.jar"""'], {}), "('not_exist_dir', 'not_exist.jar')\n", (927, 961), False, 'import os\n'), ((1043, 1088), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/p2p-31.e"""'], {}), "('${GS_TEST_DIR}/p2p-31.e')\n", (1061, 1088), False, 'import os\n'), ((1152, 1206), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/gars/sssp_pie.gar"""'], {}), "('${GS_TEST_DIR}/gars/sssp_pie.gar')\n", (1170, 1206), False, 'import os\n'), ((1269, 1320), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/jars/empty.jar"""'], {}), "('${GS_TEST_DIR}/jars/empty.jar')\n", (1287, 1320), False, 'import os\n'), ((1382, 1420), 'os.path.expandvars', 'os.path.expandvars', (['"""${USER_JAR_PATH}"""'], {}), "('${USER_JAR_PATH}')\n", (1400, 1420), False, 'import os\n'), ((3506, 3551), 'graphscope.JavaApp', 'JavaApp', (['demo_jar', 'projected_graph_sssp_class'], {}), '(demo_jar, projected_graph_sssp_class)\n', (3513, 3551), False, 'from graphscope import JavaApp\n'), ((3827, 3901), 'graphscope.JavaApp', 'JavaApp', ([], {'full_jar_path': 'demo_jar', 'java_app_class': 'projected_graph_sssp_class'}), '(full_jar_path=demo_jar, java_app_class=projected_graph_sssp_class)\n', (3834, 3901), False, 'from graphscope import JavaApp\n'), ((4991, 5057), 'graphscope.framework.app.load_app', 'load_app', ([], {'algo': '"""giraph:com.alibaba.graphscope.example.giraph.SSSP"""'}), "(algo='giraph:com.alibaba.graphscope.example.giraph.SSSP')\n", (4999, 5057), False, 'from graphscope.framework.app import load_app\n'), ((1908, 1940), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (1921, 1940), False, 'import pytest\n'), ((1957, 2007), 'graphscope.JavaApp', 'JavaApp', (['not_exist_jar', 'projected_graph_sssp_class'], {}), '(not_exist_jar, projected_graph_sssp_class)\n', (1964, 2007), False, 'from graphscope import JavaApp\n'), ((2017, 2049), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (2030, 2049), False, 'import pytest\n'), ((2066, 2110), 'graphscope.JavaApp', 'JavaApp', (['not_exist_jar', 'non_exist_java_class'], {}), '(not_exist_jar, non_exist_java_class)\n', (2073, 2110), False, 'from graphscope import JavaApp\n'), ((1700, 1732), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (1714, 1732), False, 'import os\n'), ((2332, 2355), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), 
'(KeyError)\n', (2345, 2355), False, 'import pytest\n'), ((2372, 2421), 'graphscope.JavaApp', 'JavaApp', (['not_jar_file', 'projected_graph_sssp_class'], {}), '(not_jar_file, projected_graph_sssp_class)\n', (2379, 2421), False, 'from graphscope import JavaApp\n'), ((2431, 2454), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2444, 2454), False, 'import pytest\n'), ((2471, 2514), 'graphscope.JavaApp', 'JavaApp', (['not_jar_file', 'non_exist_java_class'], {}), '(not_jar_file, non_exist_java_class)\n', (2478, 2514), False, 'from graphscope import JavaApp\n'), ((2138, 2170), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (2152, 2170), False, 'import os\n'), ((2733, 2756), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2746, 2756), False, 'import pytest\n'), ((2773, 2820), 'graphscope.JavaApp', 'JavaApp', (['a_gar_file', 'projected_graph_sssp_class'], {}), '(a_gar_file, projected_graph_sssp_class)\n', (2780, 2820), False, 'from graphscope import JavaApp\n'), ((2830, 2853), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2843, 2853), False, 'import pytest\n'), ((2870, 2911), 'graphscope.JavaApp', 'JavaApp', (['a_gar_file', 'non_exist_java_class'], {}), '(a_gar_file, non_exist_java_class)\n', (2877, 2911), False, 'from graphscope import JavaApp\n'), ((2542, 2574), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (2556, 2574), False, 'import os\n'), ((3130, 3153), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (3143, 3153), False, 'import pytest\n'), ((3170, 3216), 'graphscope.JavaApp', 'JavaApp', (['empty_jar', 'projected_graph_sssp_class'], {}), '(empty_jar, projected_graph_sssp_class)\n', (3177, 3216), False, 'from graphscope import JavaApp\n'), ((3226, 3249), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (3239, 3249), False, 'import pytest\n'), ((3266, 3306), 'graphscope.JavaApp', 'JavaApp', (['empty_jar', 'non_exist_java_class'], {}), '(empty_jar, non_exist_java_class)\n', (3273, 3306), False, 'from graphscope import JavaApp\n'), ((2939, 2971), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (2953, 2971), False, 'import os\n'), ((3334, 3366), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (3348, 3366), False, 'import os\n'), ((3579, 3611), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (3593, 3611), False, 'import os\n'), ((4458, 4490), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (4472, 4490), False, 'import os\n'), ((4157, 4202), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/p2p-31.v"""'], {}), "('${GS_TEST_DIR}/p2p-31.v')\n", (4175, 4202), False, 'import os\n'), ((4243, 4288), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/p2p-31.e"""'], {}), "('${GS_TEST_DIR}/p2p-31.e')\n", (4261, 4288), False, 'import os\n')]
"""Generators - Small ===================== Some small graphs """ import pytest from networkx.generators.tests.test_small import TestGeneratorsSmall from graphscope.framework.errors import UnimplementedError from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGeneratorsSmall) class TestGeneratorsSmall: def test_properties_named_small_graphs(self): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((281, 326), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (304, 326), False, 'import pytest\n'), ((328, 375), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGeneratorsSmall'], {}), '(TestGeneratorsSmall)\n', (354, 375), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
import networkx.algorithms.assortativity.tests.test_pairs import pytest from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx(networkx.algorithms.assortativity.tests.test_pairs, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.assortativity.tests.test_pairs import TestAttributeMixingXY from networkx.algorithms.assortativity.tests.test_pairs import TestDegreeMixingXY from .base_test import BaseTestAttributeMixing from .base_test import BaseTestDegreeMixing @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestAttributeMixingXY) class TestAttributeMixingXY(BaseTestAttributeMixing): @pytest.mark.skip(reason="not support multigraph") def test_node_attribute_xy_multigraph(self): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestDegreeMixingXY) class TestDegreeMixingXY(BaseTestDegreeMixing): @pytest.mark.skip(reason="not support multigraph") def test_node_degree_xy_multigraph(self): pass
[ "graphscope.experimental.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((650, 695), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (673, 695), False, 'import pytest\n'), ((697, 746), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestAttributeMixingXY'], {}), '(TestAttributeMixingXY)\n', (723, 746), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((921, 966), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (944, 966), False, 'import pytest\n'), ((968, 1014), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestDegreeMixingXY'], {}), '(TestDegreeMixingXY)\n', (994, 1014), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((806, 855), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (822, 855), False, 'import pytest\n'), ((1068, 1117), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1084, 1117), False, 'import pytest\n'), ((340, 385), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (363, 385), False, 'import pytest\n')]
import pytest from graphscope import nx from graphscope.nx.tests.utils import replace_with_inf class TestRunGenericPath: def setup_method(self): self.edges = [(0, 1), (0, 2), (1, 2), (2, 3), (1, 4)] G = nx.Graph() G.add_edges_from(self.edges, weight=1) DG = nx.DiGraph() DG.add_edges_from(self.edges, weight=1) self.G = G self.DG = DG def teardown_method(self): del self.G del self.edges def test_run_shortest_path(self): nx.builtin.shortest_path(self.G, source=0, weight="weight") def test_run_shortest_path_length(self): nx.builtin.single_source_dijkstra_path_length(self.G, source=0, weight="weight") def test_run_average_shortest_path_length(self): nx.builtin.average_shortest_path_length(self.G, weight="weight") def test_run_has_path(self): nx.builtin.has_path(self.G, source=0, target=3) def test_shortest_path_length_on_reverse_view(self): ret1 = nx.builtin.single_source_dijkstra_path_length( self.DG, source=2, weight="weight" ) assert replace_with_inf(dict(ret1.values)) == { 0.0: float("inf"), 1.0: float("inf"), 2.0: 0.0, 3.0: 1.0, 4.0: float("inf"), } RDG = self.DG.reverse(copy=False) ret2 = nx.builtin.single_source_dijkstra_path_length( RDG, source=2, weight="weight" ) assert replace_with_inf(dict(ret2.values)) == { 0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: float("inf"), 4.0: float("inf"), } def test_shortest_path_length_on_directed_view(self): ret1 = nx.builtin.single_source_dijkstra_path_length( self.G, source=2, weight="weight" ) assert dict(ret1.values) == {0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: 1.0, 4.0: 2.0} DG = self.G.to_directed(as_view=True) ret2 = nx.builtin.single_source_dijkstra_path_length( DG, source=2, weight="weight" ) assert dict(ret2.values) == {0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: 1.0, 4.0: 2.0}
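For readers unfamiliar with the builtin wrappers exercised above, here is a compact sketch on the same five-node graph; it assumes a running graphscope session, and the expected distances follow from the edge list with unit weights.

# Sketch only: requires a running graphscope session, like the tests above.
from graphscope import nx

G = nx.Graph()
G.add_edges_from([(0, 1), (0, 2), (1, 2), (2, 3), (1, 4)], weight=1)

# Unit-weight distances from node 0: 0->1 and 0->2 are direct (1),
# 0->3 goes through 2 and 0->4 through 1 (2 each).
lengths = nx.builtin.single_source_dijkstra_path_length(G, source=0, weight="weight")
print(dict(lengths.values))  # expected: {0.0: 0.0, 1.0: 1.0, 2.0: 1.0, 3.0: 2.0, 4.0: 2.0}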
[ "graphscope.nx.builtin.shortest_path", "graphscope.nx.DiGraph", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.builtin.has_path", "graphscope.nx.builtin.average_shortest_path_length", "graphscope.nx.Graph" ]
[((226, 236), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (234, 236), False, 'from graphscope import nx\n'), ((297, 309), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (307, 309), False, 'from graphscope import nx\n'), ((519, 578), 'graphscope.nx.builtin.shortest_path', 'nx.builtin.shortest_path', (['self.G'], {'source': '(0)', 'weight': '"""weight"""'}), "(self.G, source=0, weight='weight')\n", (543, 578), False, 'from graphscope import nx\n'), ((633, 718), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.G'], {'source': '(0)', 'weight': '"""weight"""'}), "(self.G, source=0, weight='weight'\n )\n", (678, 718), False, 'from graphscope import nx\n'), ((776, 840), 'graphscope.nx.builtin.average_shortest_path_length', 'nx.builtin.average_shortest_path_length', (['self.G'], {'weight': '"""weight"""'}), "(self.G, weight='weight')\n", (815, 840), False, 'from graphscope import nx\n'), ((883, 930), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.G'], {'source': '(0)', 'target': '(3)'}), '(self.G, source=0, target=3)\n', (902, 930), False, 'from graphscope import nx\n'), ((1004, 1090), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.DG'], {'source': '(2)', 'weight': '"""weight"""'}), "(self.DG, source=2, weight=\n 'weight')\n", (1049, 1090), False, 'from graphscope import nx\n'), ((1368, 1445), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['RDG'], {'source': '(2)', 'weight': '"""weight"""'}), "(RDG, source=2, weight='weight')\n", (1413, 1445), False, 'from graphscope import nx\n'), ((1736, 1821), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.G'], {'source': '(2)', 'weight': '"""weight"""'}), "(self.G, source=2, weight='weight'\n )\n", (1781, 1821), False, 'from graphscope import nx\n'), ((1987, 2063), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['DG'], {'source': '(2)', 'weight': '"""weight"""'}), "(DG, source=2, weight='weight')\n", (2032, 2063), False, 'from graphscope import nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pytest import graphscope.nx as nx class TestRandomClusteredGraph: def test_valid(self): node = [1, 1, 1, 2, 1, 2, 0, 0] tri = [0, 0, 0, 0, 0, 1, 1, 1] joint_degree_sequence = zip(node, tri) G = nx.random_clustered_graph(joint_degree_sequence) assert G.number_of_nodes() == 8 assert G.number_of_edges() == 7 def test_valid2(self): G = nx.random_clustered_graph([(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)]) assert G.number_of_nodes() == 6 assert G.number_of_edges() == 10 def test_invalid1(self): pytest.raises( (TypeError, nx.NetworkXError), nx.random_clustered_graph, [[1, 1], [2, 1], [0, 1]], ) def test_invalid2(self): pytest.raises( (TypeError, nx.NetworkXError), nx.random_clustered_graph, [[1, 1], [1, 2], [0, 1]], )
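The expected counts in test_valid follow directly from how the generator reads the joint degree sequence: each pair is (independent-edge degree, triangle degree), so sum(node) = 8 stubs make 4 plain edges and sum(tri) = 3 triangle-stubs make 1 triangle contributing 3 more edges, i.e. 7 edges over 8 nodes. The same computation as a sketch, assuming a running graphscope session:

# Sketch only: assumes a graphscope session, as in the tests above.
import graphscope.nx as nx

node = [1, 1, 1, 2, 1, 2, 0, 0]  # independent-edge degrees: sum 8 -> 4 edges
tri = [0, 0, 0, 0, 0, 1, 1, 1]   # triangle degrees: sum 3 -> 1 triangle -> 3 edges

G = nx.random_clustered_graph(zip(node, tri))
assert G.number_of_nodes() == 8
assert G.number_of_edges() == 7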
[ "graphscope.nx.random_clustered_graph", "pytest.raises" ]
[((640, 688), 'graphscope.nx.random_clustered_graph', 'nx.random_clustered_graph', (['joint_degree_sequence'], {}), '(joint_degree_sequence)\n', (665, 688), True, 'import graphscope.nx as nx\n'), ((809, 884), 'graphscope.nx.random_clustered_graph', 'nx.random_clustered_graph', (['[(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)]'], {}), '([(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)])\n', (834, 884), True, 'import graphscope.nx as nx\n'), ((1004, 1105), 'pytest.raises', 'pytest.raises', (['(TypeError, nx.NetworkXError)', 'nx.random_clustered_graph', '[[1, 1], [2, 1], [0, 1]]'], {}), '((TypeError, nx.NetworkXError), nx.random_clustered_graph, [[1,\n 1], [2, 1], [0, 1]])\n', (1017, 1105), False, 'import pytest\n'), ((1187, 1288), 'pytest.raises', 'pytest.raises', (['(TypeError, nx.NetworkXError)', 'nx.random_clustered_graph', '[[1, 1], [1, 2], [0, 1]]'], {}), '((TypeError, nx.NetworkXError), nx.random_clustered_graph, [[1,\n 1], [1, 2], [0, 1]])\n', (1200, 1288), False, 'import pytest\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from graphscope.deploy.kubernetes.resource_builder import ReplicaSetBuilder def test_replica_set_builder(): labels = { "app.kubernetes.io/name": "graphscope", "app.kubernetes.io/component": "engine", } engine_builder = ReplicaSetBuilder( name="engine", labels=labels, replicas=2, image_pull_policy=None, ) result = engine_builder.build() assert result["spec"]["template"]["metadata"]["annotations"] == {} name = "kubectl.kubernetes.io/default-container" engine_builder.add_annotation(name, "engine") result = engine_builder.build() assert result["spec"]["template"]["metadata"]["annotations"][name] == "engine"
[ "graphscope.deploy.kubernetes.resource_builder.ReplicaSetBuilder" ]
[((916, 1003), 'graphscope.deploy.kubernetes.resource_builder.ReplicaSetBuilder', 'ReplicaSetBuilder', ([], {'name': '"""engine"""', 'labels': 'labels', 'replicas': '(2)', 'image_pull_policy': 'None'}), "(name='engine', labels=labels, replicas=2,\n image_pull_policy=None)\n", (933, 1003), False, 'from graphscope.deploy.kubernetes.resource_builder import ReplicaSetBuilder\n')]
import pytest from networkx.algorithms.assortativity import degree_mixing_dict # fmt: off from networkx.generators.tests.test_joint_degree_seq import \ test_is_valid_directed_joint_degree from networkx.generators.tests.test_joint_degree_seq import test_is_valid_joint_degree from networkx.generators.tests.test_joint_degree_seq import test_joint_degree_graph from graphscope.nx.generators import gnm_random_graph from graphscope.nx.generators import powerlaw_cluster_graph from graphscope.nx.generators.joint_degree_seq import directed_joint_degree_graph from graphscope.nx.generators.joint_degree_seq import is_valid_directed_joint_degree from graphscope.nx.generators.joint_degree_seq import is_valid_joint_degree from graphscope.nx.generators.joint_degree_seq import joint_degree_graph from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_is_valid_joint_degree) def test_is_valid_joint_degree(): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_joint_degree_graph) def test_joint_degree_graph(): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_is_valid_directed_joint_degree) def test_is_valid_directed_joint_degree(): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((864, 909), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (887, 909), False, 'import pytest\n'), ((911, 965), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_is_valid_joint_degree'], {}), '(test_is_valid_joint_degree)\n', (937, 965), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1012, 1057), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1035, 1057), False, 'import pytest\n'), ((1059, 1110), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_joint_degree_graph'], {}), '(test_joint_degree_graph)\n', (1085, 1110), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1154, 1199), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1177, 1199), False, 'import pytest\n'), ((1201, 1264), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_is_valid_directed_joint_degree'], {}), '(test_is_valid_directed_joint_degree)\n', (1227, 1264), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import numpy as np import pandas as pd import pytest import graphscope from graphscope.framework.errors import AnalyticalEngineInternalError from graphscope.framework.loader import Loader @pytest.fixture def p2p_31_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property")): return "vineyard://%s/p2p-31_property_e_0#header_row=true&delimiter=," % data_dir @pytest.fixture def p2p_31_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property")): return "vineyard://%s/p2p-31_property_v_0#header_row=true&delimiter=," % data_dir @pytest.fixture def lesson_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/lesson.v#header_row=true&delimiter=," % data_dir @pytest.fixture def student_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/student.v#header_row=true&delimiter=," % data_dir @pytest.fixture def teacher_v(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/teacher.v#header_row=true&delimiter=," % data_dir @pytest.fixture def score_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/score.e#header_row=true&delimiter=," % data_dir @pytest.fixture def student_teacher_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/student_teacher.e#header_row=true&delimiter=," % data_dir @pytest.fixture def teacher_lesson_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/teacher_lesson.e#header_row=true&delimiter=," % data_dir @pytest.fixture def student_group_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/group.e#header_row=true&delimiter=," % data_dir @pytest.fixture def teacher_group_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/teacher_group.e#header_row=true&delimiter=," % data_dir @pytest.fixture def friend_e(data_dir=os.path.expandvars("${GS_TEST_DIR}/property_graph")): return "vineyard://%s/friend.e#header_row=true&delimiter=," % data_dir @pytest.mark.skip("requires vineyard's io adaptors installed properly") def test_p2p_form_loader(graphscope_session, p2p_31_e, p2p_31_v): g = graphscope_session.load_from( edges={ "group": { "loader": Loader(p2p_31_e, session=graphscope_session), } }, vertices={ "student": { "loader": Loader(p2p_31_v, session=graphscope_session), } }, ) @pytest.mark.skip("requires vineyard's io adaptors installed properly") def test_dict_in_dict_form_loader(graphscope_session, student_group_e, student_v): g = graphscope_session.load_from( edges={ "group": { "loader": Loader(student_group_e, session=graphscope_session), "properties": ["member_size"], "source": ("leader_student_id", "student"), "destination": ("member_student_id", "student"), "load_strategy": "both_out_in", } }, vertices={ "student": { "loader": Loader(student_v, 
session=graphscope_session), "properties": ["name", "lesson_nums", "avg_score"], "vid": "student_id", } }, )
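Stripped of the pytest scaffolding, the dict-in-dict loading form used by the skipped tests above looks like the sketch below. It assumes a running session sess, vineyard io adaptors installed, and the GS_TEST_DIR property-graph files; the URIs and column names are the ones from the fixtures.

# Sketch only: requires a running GraphScope session (`sess`) and vineyard io adaptors.
import os

from graphscope.framework.loader import Loader

data_dir = os.path.expandvars("${GS_TEST_DIR}/property_graph")

g = sess.load_from(
    edges={
        "group": {
            "loader": Loader(
                "vineyard://%s/group.e#header_row=true&delimiter=," % data_dir,
                session=sess,
            ),
            "properties": ["member_size"],
            "source": ("leader_student_id", "student"),
            "destination": ("member_student_id", "student"),
        }
    },
    vertices={
        "student": {
            "loader": Loader(
                "vineyard://%s/student.v#header_row=true&delimiter=," % data_dir,
                session=sess,
            ),
            "properties": ["name", "lesson_nums", "avg_score"],
            "vid": "student_id",
        }
    },
)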
[ "graphscope.framework.loader.Loader", "pytest.mark.skip", "os.path.expandvars" ]
[((2795, 2865), 'pytest.mark.skip', 'pytest.mark.skip', (['"""requires vineyard\'s io adaptors installed properly"""'], {}), '("requires vineyard\'s io adaptors installed properly")\n', (2811, 2865), False, 'import pytest\n'), ((3256, 3326), 'pytest.mark.skip', 'pytest.mark.skip', (['"""requires vineyard\'s io adaptors installed properly"""'], {}), '("requires vineyard\'s io adaptors installed properly")\n', (3272, 3326), False, 'import pytest\n'), ((907, 952), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property"""'], {}), "('${GS_TEST_DIR}/property')\n", (925, 952), False, 'import os\n'), ((1081, 1126), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property"""'], {}), "('${GS_TEST_DIR}/property')\n", (1099, 1126), False, 'import os\n'), ((1255, 1306), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1273, 1306), False, 'import os\n'), ((1425, 1476), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1443, 1476), False, 'import os\n'), ((1596, 1647), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1614, 1647), False, 'import os\n'), ((1765, 1816), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1783, 1816), False, 'import os\n'), ((1942, 1993), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (1960, 1993), False, 'import os\n'), ((2128, 2179), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2146, 2179), False, 'import os\n'), ((2312, 2363), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2330, 2363), False, 'import os\n'), ((2487, 2538), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2505, 2538), False, 'import os\n'), ((2663, 2714), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property_graph"""'], {}), "('${GS_TEST_DIR}/property_graph')\n", (2681, 2714), False, 'import os\n'), ((3035, 3079), 'graphscope.framework.loader.Loader', 'Loader', (['p2p_31_e'], {'session': 'graphscope_session'}), '(p2p_31_e, session=graphscope_session)\n', (3041, 3079), False, 'from graphscope.framework.loader import Loader\n'), ((3176, 3220), 'graphscope.framework.loader.Loader', 'Loader', (['p2p_31_v'], {'session': 'graphscope_session'}), '(p2p_31_v, session=graphscope_session)\n', (3182, 3220), False, 'from graphscope.framework.loader import Loader\n'), ((3513, 3564), 'graphscope.framework.loader.Loader', 'Loader', (['student_group_e'], {'session': 'graphscope_session'}), '(student_group_e, session=graphscope_session)\n', (3519, 3564), False, 'from graphscope.framework.loader import Loader\n'), ((3881, 3926), 'graphscope.framework.loader.Loader', 'Loader', (['student_v'], {'session': 'graphscope_session'}), '(student_v, session=graphscope_session)\n', (3887, 3926), False, 'from graphscope.framework.loader import Loader\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import hashlib import itertools import json from typing import List from graphscope.framework.utils import unify_type from graphscope.proto import types_pb2 class Entry: class PropertyDef: def __init__(self, prop_id: int, name: str, data_type: int) -> None: self.id = prop_id self.name = name self.type = data_type def __repr__(self) -> str: return f"Property({self.id}, {self.name})" def __str__(self) -> str: return self.__repr__() def __init__(self, label_id: int, label: str) -> None: self._id = label_id self._label = label self._props = [] self._valid_props = [] self._relations = [] self._prop_index = {} @property def id(self) -> int: return self._id @property def label(self) -> str: return self._label @property def properties(self) -> List: return list(itertools.compress(self._props, self._valid_props)) @property def relations(self) -> List: return self._relations def get_property_id(self, name): idx = self._prop_index[name] if not self._valid_props[idx]: raise ValueError(f"{name} not exist in properties") return idx def add_property(self, name, data_type): self._prop_index[name] = len(self._props) self._props.append(self.PropertyDef(len(self._props), name, data_type)) self._valid_props.append(1) def add_relation(self, src, dst): self._relations.append((src, dst)) def __repr__(self) -> str: s = f"Label: {self.label}\nProperties: {', '.join([str(p) for p in self.properties])}\n" if self._relations: s += f"Relations: {self.relations}" return s def __str__(self) -> str: return self.__repr__() class GraphSchema: """Hold schema of a graph. Attributes: oid_type (str): Original ID type vid_type (str): Internal ID representation vdata_type (str): Type of the data that holding by vertex (simple graph only) edata_type (str): Type of the data that holding by edge (simple graph only) vertex_labels (list): Label names of vertex edge_labels (list): Label names of edge edge_relationships (list(list(tuple))): Source label and destination label of each edge label """ def __init__(self): self._oid_type = None self._vid_type = None # simple graph only self._vdata_type = types_pb2.INVALID self._edata_type = types_pb2.INVALID # list of entries self._vertex_entries: List[Entry] = [] self._edge_entries: List[Entry] = [] # 1 indicate valid, 0 indicate invalid. self._valid_vertices = [] self._valid_edges = [] self._v_label_index = {} self._e_label_index = {} def get_schema_from_def(self, schema_def): """Decode informations from proto message, generated by engine. Args: schema_def (`GraphSchemaDef`): Proto message defined in `proto/graph_def.proto`. Raises: ValueError: If the schema is not valid. """ self._oid_type = schema_def.oid_type self._vid_type = schema_def.vid_type # simple graph schema. 
if schema_def.vdata_type: self._vdata_type = unify_type(schema_def.vdata_type) if schema_def.edata_type: self._edata_type = unify_type(schema_def.edata_type) # property graph schema if schema_def.property_schema_json: try: schema = json.loads(schema_def.property_schema_json) if schema: for item in schema["types"]: entry = Entry(item["id"], item["label"]) for prop in item["propertyDefList"]: entry.add_property( prop["name"], unify_type(prop["data_type"]) ) entry._valid_props = item["valid_properties"] if item["type"] == "VERTEX": assert entry.id == len(self._vertex_entries) self._vertex_entries.append(entry) self._v_label_index[entry.label] = entry.id elif item["type"] == "EDGE": assert entry.id == len(self._edge_entries) for rel in item["rawRelationShips"]: entry.add_relation( rel["srcVertexLabel"], rel["dstVertexLabel"] ) self._edge_entries.append(entry) self._e_label_index[entry.label] = entry.id self._valid_vertices = schema["valid_vertices"] self._valid_edges = schema["valid_edges"] except Exception as e: raise ValueError("Invalid property graph schema") from e def init_nx_schema(self, gs_schema=None): """Schema for `nx.Graph` Args: gs_schema (`GraphSchema`, optional): schema of a graphscope `Graph`. Defaults to None. """ if gs_schema is not None: for entry in gs_schema._valid_vertex_entries(): for props in entry.properties: if props.name not in self._vertex_entries[0]._prop_index: self._vertex_entries[0].add_property(props.name, props.type) for entry in gs_schema._valid_edge_entries(): for props in entry.properties: if props.name not in self._edge_entries[0]._prop_index: self._edge_entries[0].add_property(props.name, props.type) else: self._vertex_entries.append(Entry(0, "_")) self._v_label_index["_"] = 0 self._edge_entries.append(Entry(0, "_")) self._e_label_index["_"] = 0 self._edge_entries[0].add_relation("_", "_") self._valid_vertices = [1] self._valid_edges = [1] def __repr__(self): s = f"oid_type: {self._oid_type}\nvid_type: {self._vid_type}\n" if ( self._vdata_type != types_pb2.INVALID and self._edata_type != types_pb2.INVALID ): s += f"vdata_type: {types_pb2.DataType.Name(self._vdata_type)}\n" s += f"edata_type: {types_pb2.DataType.Name(self._edata_type)}\n" for entry in self._valid_vertex_entries(): s += f"type: VERTEX\n{str(entry)}\n" for entry in self._valid_edge_entries(): s += f"type: EDGE\n{str(entry)}\n" return s def __str__(self): return self.__repr__() @property def oid_type(self): return self._oid_type @property def vid_type(self): return self._vid_type @property def vdata_type(self): # NB: simple graph only contain a single vertex property. return self._vdata_type @property def edata_type(self): # NB: simple graph only contain a single edge property. 
return self._edata_type def _valid_vertex_entries(self): return itertools.compress(self._vertex_entries, self._valid_vertices) def _valid_edge_entries(self): return itertools.compress(self._edge_entries, self._valid_edges) @property def vertex_labels(self): return [entry.label for entry in self._valid_vertex_entries()] @property def edge_labels(self): return [entry.label for entry in self._valid_edge_entries()] @property def edge_relationships(self): return [entry.relations for entry in self._valid_edge_entries()] def get_relationships(self, label): return self._edge_entries[self._e_label_index[label]].relations @property def vertex_label_num(self): return sum(self._valid_vertices) @property def edge_label_num(self): return sum(self._valid_edges) def get_vertex_properties(self, label): return self._vertex_entries[self.get_vertex_label_id(label)].properties def get_edge_properties(self, label): return self._edge_entries[self.get_edge_label_id(label)].properties def get_vertex_label_id(self, label): idx = self._v_label_index[label] if not self._valid_vertices[idx]: raise ValueError(f"Vertex {label} not exist in graph") return idx def get_edge_label_id(self, label): idx = self._e_label_index[label] if not self._valid_edges[idx]: raise ValueError(f"Edge {label} not exist in graph") return idx def get_vertex_property_id(self, label, prop): return self._vertex_entries[self.get_vertex_label_id(label)].get_property_id( prop ) def get_edge_property_id(self, label, prop): return self._edge_entries[self.get_edge_label_id(label)].get_property_id(prop) def add_nx_vertex_properties(self, properties) -> bool: for key, value in properties.items(): try: prop_type = unify_type(type(value)) for prop in self._vertex_entries[0].properties: if key == prop.name: if prop_type != prop.type: return False break else: self._vertex_entries[0].add_property(key, prop_type) except TypeError: return False return True def add_nx_edge_properties(self, properties) -> bool: for key, value in properties.items(): try: prop_type = unify_type(type(value)) for prop in self._edge_entries[0].properties: if key == prop.name: if prop_type != prop.type: return False break else: self._edge_entries[0].add_property(key, prop_type) except TypeError: return False return True def clear(self): self._oid_type = None self._vid_type = None self._vdata_type = types_pb2.INVALID self._edata_type = types_pb2.INVALID self._vertex_entries.clear() self._edge_entries.clear() self._valid_vertices.clear() self._valid_edges.clear() self._v_label_index.clear() self._e_label_index.clear() def signature(self): return hashlib.sha256("{}".format(self.__repr__()).encode("utf-8")).hexdigest()
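get_schema_from_def above expects property_schema_json shaped like the sketch below; the key names are taken straight from the parsing code, while the labels, ids and data-type codes are invented for illustration.

import json

# Illustrative only: key names mirror what get_schema_from_def reads;
# the labels, ids and data_type codes here are made up.
property_schema = {
    "types": [
        {
            "id": 0,
            "label": "person",
            "type": "VERTEX",
            "propertyDefList": [{"name": "age", "data_type": 1}],
            "valid_properties": [1],
        },
        {
            "id": 0,
            "label": "knows",
            "type": "EDGE",
            "propertyDefList": [{"name": "weight", "data_type": 2}],
            "valid_properties": [1],
            "rawRelationShips": [
                {"srcVertexLabel": "person", "dstVertexLabel": "person"}
            ],
        },
    ],
    "valid_vertices": [1],
    "valid_edges": [1],
}

property_schema_json = json.dumps(property_schema)
assert json.loads(property_schema_json)["types"][1]["type"] == "EDGE"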
[ "json.loads", "graphscope.proto.types_pb2.DataType.Name", "graphscope.framework.utils.unify_type", "itertools.compress" ]
[((8013, 8075), 'itertools.compress', 'itertools.compress', (['self._vertex_entries', 'self._valid_vertices'], {}), '(self._vertex_entries, self._valid_vertices)\n', (8031, 8075), False, 'import itertools\n'), ((8127, 8184), 'itertools.compress', 'itertools.compress', (['self._edge_entries', 'self._valid_edges'], {}), '(self._edge_entries, self._valid_edges)\n', (8145, 8184), False, 'import itertools\n'), ((1629, 1679), 'itertools.compress', 'itertools.compress', (['self._props', 'self._valid_props'], {}), '(self._props, self._valid_props)\n', (1647, 1679), False, 'import itertools\n'), ((4076, 4109), 'graphscope.framework.utils.unify_type', 'unify_type', (['schema_def.vdata_type'], {}), '(schema_def.vdata_type)\n', (4086, 4109), False, 'from graphscope.framework.utils import unify_type\n'), ((4175, 4208), 'graphscope.framework.utils.unify_type', 'unify_type', (['schema_def.edata_type'], {}), '(schema_def.edata_type)\n', (4185, 4208), False, 'from graphscope.framework.utils import unify_type\n'), ((4328, 4371), 'json.loads', 'json.loads', (['schema_def.property_schema_json'], {}), '(schema_def.property_schema_json)\n', (4338, 4371), False, 'import json\n'), ((7154, 7195), 'graphscope.proto.types_pb2.DataType.Name', 'types_pb2.DataType.Name', (['self._vdata_type'], {}), '(self._vdata_type)\n', (7177, 7195), False, 'from graphscope.proto import types_pb2\n'), ((7232, 7273), 'graphscope.proto.types_pb2.DataType.Name', 'types_pb2.DataType.Name', (['self._edata_type'], {}), '(self._edata_type)\n', (7255, 7273), False, 'from graphscope.proto import types_pb2\n'), ((4668, 4697), 'graphscope.framework.utils.unify_type', 'unify_type', (["prop['data_type']"], {}), "(prop['data_type'])\n", (4678, 4697), False, 'from graphscope.framework.utils import unify_type\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from itertools import chain from typing import Any from typing import Dict from typing import Iterable from typing import Mapping from typing import Sequence from typing import Tuple from typing import Union import numpy as np import pandas as pd import vineyard from graphscope.framework import utils from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.errors import check_argument from graphscope.framework.loader import Loader from graphscope.proto import attr_value_pb2 from graphscope.proto import types_pb2 VineyardObjectTypes = (vineyard.Object, vineyard.ObjectID, vineyard.ObjectName) LoaderVariants = Union[ Loader, str, Sequence[np.ndarray], pd.DataFrame, vineyard.Object, vineyard.ObjectID, vineyard.ObjectName, ] class VertexLabel(object): """Holds meta informations about a single vertex label.""" def __init__( self, label: str, loader: Any, properties: Sequence = None, vid_field: Union[str, int] = 0, ): self.label = label if isinstance(loader, Loader): self.loader = loader else: self.loader = Loader(loader) self.raw_properties = properties self.properties = [] self.vid_field = vid_field self._finished = False def finish(self, id_type: str = "int64_t"): # Normalize properties # Add vid to property list if self._finished: return self.add_property(str(self.vid_field), id_type) if self.raw_properties: self.add_properties(self.raw_properties) elif self.loader.deduced_properties: self.add_properties(self.loader.deduced_properties) self.loader.select_columns( self.properties, include_all=bool(not self.raw_properties) ) self.loader.finish() self._finished = True def __str__(self) -> str: s = "\ntype: VertexLabel" s += "\nlabel: " + self.label s += "\nproperties: " + str(self.properties) s += "\nvid: " + str(self.vid_field) s += "\nloader: " + repr(self.loader) return s def __repr__(self) -> str: return self.__str__() def add_property(self, prop: str, dtype=None) -> None: """prop is a str, representing name. It can optionally have a type.""" self.properties.append((prop, utils.unify_type(dtype))) def add_properties(self, properties: Sequence) -> None: for prop in properties: if isinstance(prop, str): self.add_property(prop) else: self.add_property(prop[0], prop[1]) class EdgeSubLabel(object): """Hold meta informations of a single relationship. i.e. 
src_label -> edge_label -> dst_label """ def __init__( self, loader, properties=None, src_label: str = "_", dst_label: str = "_", src_field: Union[str, int] = 0, dst_field: Union[str, int] = 1, load_strategy="both_out_in", ): if isinstance(loader, Loader): self.loader = loader else: self.loader = Loader(loader) self.raw_properties = properties self.properties = [] self.src_label = src_label self.dst_label = dst_label self.src_field = src_field self.dst_field = dst_field self._finished = False check_argument( load_strategy in ("only_out", "only_in", "both_out_in"), "invalid load strategy: " + load_strategy, ) self.load_strategy = load_strategy if (isinstance(self.src_field, int) and isinstance(self.dst_field, str)) or ( isinstance(self.src_field, str) and isinstance(self.dst_field, int) ): print("src field", self.src_field, "dst_field", self.dst_field) raise SyntaxError( "Source vid and destination vid must have same formats, both use name or both use index" ) def finish(self, id_type: str = "int64_t"): if self._finished: return self.add_property(str(self.src_field), id_type) self.add_property(str(self.dst_field), id_type) if self.raw_properties: self.add_properties(self.raw_properties) elif self.loader.deduced_properties: self.add_properties(self.loader.deduced_properties) self.loader.select_columns( self.properties, include_all=bool(not self.raw_properties) ) self.loader.finish() self._finished = True def __str__(self) -> str: s = "\ntype: EdgeSubLabel" s += "\nsource_label: " + self.src_label s += "\ndestination_label: " + self.dst_label s += "\nproperties: " + str(self.properties) s += "\nloader: " + repr(self.loader) return s def __repr__(self) -> str: return self.__str__() def add_property(self, prop: str, dtype=None) -> None: """prop is a str, representing name. It can optionally have a type.""" self.properties.append((prop, utils.unify_type(dtype))) def add_properties(self, properties: Sequence) -> None: for prop in properties: if isinstance(prop, str): self.add_property(prop) else: self.add_property(prop[0], prop[1]) def get_attr(self): attr_list = attr_value_pb2.NameAttrList() attr_list.name = "{}_{}".format(self.src_label, self.dst_label) attr_list.attr[types_pb2.SRC_LABEL].CopyFrom(utils.s_to_attr(self.src_label)) attr_list.attr[types_pb2.DST_LABEL].CopyFrom(utils.s_to_attr(self.dst_label)) attr_list.attr[types_pb2.LOAD_STRATEGY].CopyFrom( utils.s_to_attr(self.load_strategy) ) attr_list.attr[types_pb2.SRC_VID].CopyFrom(utils.s_to_attr(str(self.src_field))) attr_list.attr[types_pb2.DST_VID].CopyFrom(utils.s_to_attr(str(self.dst_field))) attr_list.attr[types_pb2.LOADER].CopyFrom(self.loader.get_attr()) props = [] for prop in self.properties[2:]: prop_attr = attr_value_pb2.NameAttrList() prop_attr.name = prop[0] prop_attr.attr[0].CopyFrom(utils.type_to_attr(prop[1])) props.append(prop_attr) attr_list.attr[types_pb2.PROPERTIES].list.func.extend(props) return attr_list class EdgeLabel(object): """Hold meta informations of an edge label. An Edge label may be consist of a few `EdgeSubLabel`s. i.e. 
src_label1 -> edge_label -> dst_label1 src_label2 -> edge_label -> dst_label2 src_label3 -> edge_label -> dst_label3 """ def __init__(self, label: str): self.label = label self.sub_labels = {} self._finished = False def __str__(self): s = "\ntype: EdgeLabel" s += "\nlabel: " + self.label s += "\nsub_labels: " for sub_label in self.sub_labels.values(): s += "\n" s += str(sub_label) return s def __repr__(self): return self.__str__() def add_sub_label(self, sub_label): src = sub_label.src_label dst = sub_label.dst_label if (src, dst) in self.sub_labels: raise ValueError( f"The relationship {src} -> {self.label} <- {dst} already existed in graph." ) self.sub_labels[(src, dst)] = sub_label def finish(self, id_type: str = "int64_t"): if self._finished: return for sub_label in self.sub_labels.values(): sub_label.finish(id_type) self._finished = True def process_vertex(vertex: VertexLabel) -> attr_value_pb2.NameAttrList: attr_list = attr_value_pb2.NameAttrList() attr_list.name = "vertex" attr_list.attr[types_pb2.LABEL].CopyFrom(utils.s_to_attr(vertex.label)) attr_list.attr[types_pb2.VID].CopyFrom(utils.s_to_attr(str(vertex.vid_field))) props = [] for prop in vertex.properties[1:]: prop_attr = attr_value_pb2.NameAttrList() prop_attr.name = prop[0] prop_attr.attr[0].CopyFrom(utils.type_to_attr(prop[1])) props.append(prop_attr) attr_list.attr[types_pb2.PROPERTIES].list.func.extend(props) attr_list.attr[types_pb2.LOADER].CopyFrom(vertex.loader.get_attr()) return attr_list def process_edge(edge: EdgeLabel) -> attr_value_pb2.NameAttrList: attr_list = attr_value_pb2.NameAttrList() attr_list.name = "edge" attr_list.attr[types_pb2.LABEL].CopyFrom(utils.s_to_attr(edge.label)) sub_label_attr = [sub_label.get_attr() for sub_label in edge.sub_labels.values()] attr_list.attr[types_pb2.SUB_LABEL].list.func.extend(sub_label_attr) return attr_list def assemble_op_config( vertices: Iterable[VertexLabel], edges: Iterable[EdgeLabel], oid_type: str, directed: bool, generate_eid: bool, ) -> Dict: attr = attr_value_pb2.AttrValue() for label in chain(vertices, edges): label.finish(oid_type) attr.list.func.extend([process_vertex(vertex) for vertex in vertices]) attr.list.func.extend([process_edge(edge) for edge in edges]) config = {} config[types_pb2.ARROW_PROPERTY_DEFINITION] = attr config[types_pb2.DIRECTED] = utils.b_to_attr(directed) config[types_pb2.OID_TYPE] = utils.s_to_attr(oid_type) config[types_pb2.GENERATE_EID] = utils.b_to_attr(generate_eid) # vid_type is fixed config[types_pb2.VID_TYPE] = utils.s_to_attr("uint64_t") config[types_pb2.IS_FROM_VINEYARD_ID] = utils.b_to_attr(False) return config def _convert_array_to_deprecated_form(items): compat_items = [] for i in range(len(items)): if i < 2: compat_items.append(items[i]) elif i == 2: if isinstance(items[i], (int, str)) and isinstance( items[i + 1], (int, str) ): compat_items.append("_") compat_items.append("_") compat_items.append(items[i]) compat_items.append(items[i + 1]) else: assert len(items[i]) == 2 and len(items[i + 1]) == 2 compat_items.append(items[i][1]) compat_items.append(items[i + 1][1]) compat_items.append(items[i][0]) compat_items.append(items[i + 1][0]) elif i == 3: pass else: compat_items.append(items[i]) return compat_items def _convert_dict_to_compat_form(items): if "source" in items: if isinstance(items["source"], (int, str)): items["src_label"] = "_" items["src_field"] = items["source"] else: assert len(items["source"]) == 2 items["src_label"] = items["source"][1] items["src_field"] = items["source"][0] items.pop("source") if 
"destination" in items: if isinstance(items["destination"], (int, str)): items["dst_label"] = "_" items["dst_field"] = items["destination"] else: assert len(items["destination"]) == 2 items["dst_label"] = items["destination"][1] items["dst_field"] = items["destination"][0] items.pop("destination") return items def normalize_parameter_edges( edges: Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants ] ): """Normalize parameters user passed in. Since parameters are very flexible, we need to be careful about it. Args: edges (Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants ]): Edges definition. """ def process_sub_label(items): if isinstance(items, (Loader, str, pd.DataFrame, *VineyardObjectTypes)): return EdgeSubLabel(items, None, "_", "_", 0, 1) elif isinstance(items, Sequence): if all([isinstance(item, np.ndarray) for item in items]): return EdgeSubLabel(items, None, "_", "_", 0, 1) else: check_argument(len(items) < 6, "Too many arguments for a edge label") compat_items = _convert_array_to_deprecated_form(items) return EdgeSubLabel(*compat_items) elif isinstance(items, Mapping): items = _convert_dict_to_compat_form(items) return EdgeSubLabel(**items) else: raise SyntaxError("Wrong format of e sub label: " + str(items)) def process_label(label, items): e_label = EdgeLabel(label) if isinstance(items, (Loader, str, pd.DataFrame, *VineyardObjectTypes)): e_label.add_sub_label(process_sub_label(items)) elif isinstance(items, Sequence): if isinstance( items[0], (Loader, str, pd.DataFrame, *VineyardObjectTypes, np.ndarray) ): e_label.add_sub_label(process_sub_label(items)) else: for item in items: e_label.add_sub_label(process_sub_label(item)) elif isinstance(items, Mapping): e_label.add_sub_label(process_sub_label(items)) else: raise SyntaxError("Wrong format of e label: " + str(items)) return e_label e_labels = [] if edges is None: raise ValueError("Edges should be None") if isinstance(edges, Mapping): for label, attr in edges.items(): e_labels.append(process_label(label, attr)) else: e_labels.append(process_label("_", edges)) return e_labels def normalize_parameter_vertices( vertices: Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants, None, ] ): """Normalize parameters user passed in. Since parameters are very flexible, we need to be careful about it. Args: vertices (Union[ Mapping[str, Union[Sequence, LoaderVariants, Mapping]], Tuple, LoaderVariants, None, ]): Vertices definition. """ def process_label(label, items): if isinstance(items, (Loader, str, pd.DataFrame, *VineyardObjectTypes)): return VertexLabel(label=label, loader=items) elif isinstance(items, Sequence): if all([isinstance(item, np.ndarray) for item in items]): return VertexLabel(label=label, loader=items) else: check_argument(len(items) < 4, "Too many arguments for a vertex label") return VertexLabel(label, *items) elif isinstance(items, Mapping): if "vid" in items: items["vid_field"] = items["vid"] items.pop("vid") return VertexLabel(label, **items) else: raise RuntimeError("Wrong format of v label: " + str(items)) v_labels = [] if vertices is None: return v_labels if isinstance(vertices, Mapping): for label, attr in vertices.items(): v_labels.append(process_label(label, attr)) else: v_labels.append(process_label("_", vertices)) return v_labels
[ "graphscope.proto.attr_value_pb2.NameAttrList", "itertools.chain", "graphscope.framework.loader.Loader", "graphscope.framework.utils.type_to_attr", "graphscope.framework.utils.s_to_attr", "graphscope.proto.attr_value_pb2.AttrValue", "graphscope.framework.utils.b_to_attr", "graphscope.framework.utils.unify_type", "graphscope.framework.errors.check_argument" ]
[((8489, 8518), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (8516, 8518), False, 'from graphscope.proto import attr_value_pb2\n'), ((9187, 9216), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (9214, 9216), False, 'from graphscope.proto import attr_value_pb2\n'), ((9681, 9707), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {}), '()\n', (9705, 9707), False, 'from graphscope.proto import attr_value_pb2\n'), ((9726, 9748), 'itertools.chain', 'chain', (['vertices', 'edges'], {}), '(vertices, edges)\n', (9731, 9748), False, 'from itertools import chain\n'), ((10028, 10053), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['directed'], {}), '(directed)\n', (10043, 10053), False, 'from graphscope.framework import utils\n'), ((10087, 10112), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['oid_type'], {}), '(oid_type)\n', (10102, 10112), False, 'from graphscope.framework import utils\n'), ((10150, 10179), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['generate_eid'], {}), '(generate_eid)\n', (10165, 10179), False, 'from graphscope.framework import utils\n'), ((10237, 10264), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['"""uint64_t"""'], {}), "('uint64_t')\n", (10252, 10264), False, 'from graphscope.framework import utils\n'), ((10309, 10331), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (['(False)'], {}), '(False)\n', (10324, 10331), False, 'from graphscope.framework import utils\n'), ((4135, 4254), 'graphscope.framework.errors.check_argument', 'check_argument', (["(load_strategy in ('only_out', 'only_in', 'both_out_in'))", "('invalid load strategy: ' + load_strategy)"], {}), "(load_strategy in ('only_out', 'only_in', 'both_out_in'), \n 'invalid load strategy: ' + load_strategy)\n", (4149, 4254), False, 'from graphscope.framework.errors import check_argument\n'), ((6145, 6174), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (6172, 6174), False, 'from graphscope.proto import attr_value_pb2\n'), ((8595, 8624), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex.label'], {}), '(vertex.label)\n', (8610, 8624), False, 'from graphscope.framework import utils\n'), ((8785, 8814), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (8812, 8814), False, 'from graphscope.proto import attr_value_pb2\n'), ((9291, 9318), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['edge.label'], {}), '(edge.label)\n', (9306, 9318), False, 'from graphscope.framework import utils\n'), ((1848, 1862), 'graphscope.framework.loader.Loader', 'Loader', (['loader'], {}), '(loader)\n', (1854, 1862), False, 'from graphscope.framework.loader import Loader\n'), ((3868, 3882), 'graphscope.framework.loader.Loader', 'Loader', (['loader'], {}), '(loader)\n', (3874, 3882), False, 'from graphscope.framework.loader import Loader\n'), ((6300, 6331), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.src_label'], {}), '(self.src_label)\n', (6315, 6331), False, 'from graphscope.framework import utils\n'), ((6386, 6417), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.dst_label'], {}), '(self.dst_label)\n', (6401, 6417), False, 'from graphscope.framework import utils\n'), ((6489, 6524), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.load_strategy'], {}), 
'(self.load_strategy)\n', (6504, 6524), False, 'from graphscope.framework import utils\n'), ((6873, 6902), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (6900, 6902), False, 'from graphscope.proto import attr_value_pb2\n'), ((8883, 8910), 'graphscope.framework.utils.type_to_attr', 'utils.type_to_attr', (['prop[1]'], {}), '(prop[1])\n', (8901, 8910), False, 'from graphscope.framework import utils\n'), ((3091, 3114), 'graphscope.framework.utils.unify_type', 'utils.unify_type', (['dtype'], {}), '(dtype)\n', (3107, 3114), False, 'from graphscope.framework import utils\n'), ((5833, 5856), 'graphscope.framework.utils.unify_type', 'utils.unify_type', (['dtype'], {}), '(dtype)\n', (5849, 5856), False, 'from graphscope.framework import utils\n'), ((6979, 7006), 'graphscope.framework.utils.type_to_attr', 'utils.type_to_attr', (['prop[1]'], {}), '(prop[1])\n', (6997, 7006), False, 'from graphscope.framework import utils\n')]
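For the same module, here is a sketch of driving the label classes directly and handing them to assemble_op_config; the file paths, label names, field names, and oid type are illustrative assumptions, not values taken from the source.

# Hypothetical CSV sources; Loader is the class imported in the module above.
person = VertexLabel(
    label="person",
    loader=Loader("file:///tmp/person.csv"),
    vid_field="id",
)
knows = EdgeLabel("knows")
knows.add_sub_label(
    EdgeSubLabel(
        Loader("file:///tmp/knows.csv"),
        src_label="person",
        dst_label="person",
        src_field="src_id",
        dst_field="dst_id",
    )
)
# assemble_op_config finishes each label and packs the definitions into the
# ARROW_PROPERTY_DEFINITION attribute used by the graph-creation op.
config = assemble_op_config(
    [person], [knows], oid_type="int64_t", directed=True, generate_eid=True
)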
import networkx.algorithms.centrality.tests.test_betweenness_centrality import pytest from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx( networkx.algorithms.centrality.tests.test_betweenness_centrality, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.centrality.tests.test_betweenness_centrality import \ TestBetweennessCentrality @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestBetweennessCentrality) class TestBetweennessCentrality: @pytest.mark.skip(reason="not support sampling") def test_sample_from_P3(self): G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} b = nx.betweenness_centrality(G, k=3, weight=None, normalized=False, seed=1) for n in sorted(G): assert b[n] == pytest.approx(b_answer[n], abs=1e-7) b = nx.betweenness_centrality(G, k=2, weight=None, normalized=False, seed=1) # python versions give different results with same seed b_approx1 = {0: 0.0, 1: 1.5, 2: 0.0} b_approx2 = {0: 0.0, 1: 0.75, 2: 0.0} for n in sorted(G): assert b[n] in (b_approx1[n], b_approx2[n])
[ "pytest.approx", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((487, 532), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (510, 532), False, 'import pytest\n'), ((534, 587), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestBetweennessCentrality'], {}), '(TestBetweennessCentrality)\n', (560, 587), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((626, 673), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support sampling"""'}), "(reason='not support sampling')\n", (642, 673), False, 'import pytest\n'), ((327, 372), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (350, 372), False, 'import pytest\n'), ((922, 959), 'pytest.approx', 'pytest.approx', (['b_answer[n]'], {'abs': '(1e-07)'}), '(b_answer[n], abs=1e-07)\n', (935, 959), False, 'import pytest\n')]
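The same wrapping pattern extends to other test classes in the upstream module; a sketch, assuming networkx's test_betweenness_centrality module also ships TestWeightedBetweennessCentrality.

from networkx.algorithms.centrality.tests.test_betweenness_centrality import \
    TestWeightedBetweennessCentrality


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestWeightedBetweennessCentrality)
class TestWeightedBetweennessCentrality:
    pass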
"""Generators - Directed Graphs ---------------------------- """ import pytest from networkx.generators.tests.test_directed import TestGeneratorsDirected from networkx.generators.tests.test_directed import TestRandomKOutGraph from networkx.generators.tests.test_directed import TestUniformRandomKOutGraph import graphscope.nx as nx from graphscope.nx.classes import Graph from graphscope.nx.classes import MultiDiGraph from graphscope.nx.generators.directed import gn_graph from graphscope.nx.generators.directed import gnc_graph from graphscope.nx.generators.directed import gnr_graph from graphscope.nx.generators.directed import random_k_out_graph from graphscope.nx.generators.directed import random_uniform_k_out_graph from graphscope.nx.generators.directed import scale_free_graph from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGeneratorsDirected) class TestGeneratorsDirected: pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestRandomKOutGraph) class TestRandomKOutGraph: pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestUniformRandomKOutGraph) class TestUniformRandomKOutGraph: pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((857, 902), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (880, 902), False, 'import pytest\n'), ((904, 954), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGeneratorsDirected'], {}), '(TestGeneratorsDirected)\n', (930, 954), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((997, 1042), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1020, 1042), False, 'import pytest\n'), ((1044, 1091), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestRandomKOutGraph'], {}), '(TestRandomKOutGraph)\n', (1070, 1091), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1131, 1176), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1154, 1176), False, 'import pytest\n'), ((1178, 1232), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestUniformRandomKOutGraph'], {}), '(TestUniformRandomKOutGraph)\n', (1204, 1232), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
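A sketch of calling the imported generators directly; the signatures are assumed to mirror the networkx equivalents, and a running graphscope session is assumed (the fixture above provides one in tests).

def build_small_directed_graphs():
    g1 = gn_graph(10)                    # growing network
    g2 = gnr_graph(10, 0.3)              # growing network with redirection prob. 0.3
    g3 = gnc_graph(10)                   # growing network with copying
    g4 = random_k_out_graph(10, 3, 0.5)  # k=3 out-edges per node, alpha=0.5
    return g1, g2, g3, g4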
import pytest # fmt: off from networkx.generators.tests.test_spectral_graph_forge import \ test_spectral_graph_forge from graphscope.nx import NetworkXError from graphscope.nx import is_isomorphic from graphscope.nx.generators import karate_club_graph from graphscope.nx.generators.spectral_graph_forge import spectral_graph_forge from graphscope.nx.tests.utils import assert_nodes_equal from graphscope.nx.utils.compat import with_graphscope_nx_context # fmt: on @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_spectral_graph_forge) def test_spectral_graph_forge(): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((474, 519), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (497, 519), False, 'import pytest\n'), ((521, 574), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_spectral_graph_forge'], {}), '(test_spectral_graph_forge)\n', (547, 574), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
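A sketch of what the wrapped test exercises, calling spectral_graph_forge directly on a graphscope.nx graph; the alpha value is arbitrary and a running graphscope session is assumed.

def forge_similar_graph():
    G = karate_club_graph()
    # Keep roughly 85% of the modularity-matrix spectrum when forging.
    H = spectral_graph_forge(G, 0.85)
    # The forged graph is defined on the same node set as the original.
    assert_nodes_equal(G.nodes(), H.nodes())
    return H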
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import numpy as np from graphscope.framework import utils from graphscope.framework.errors import check_argument from graphscope.framework.operation import Operation from graphscope.proto import attr_value_pb2 from graphscope.proto import query_args_pb2 from graphscope.proto import types_pb2 def create_app(graph, app): """Wrapper for create an `CREATE_APP` Operation with configuration. Compile and load an application after evaluated. Args: graph (:class:`Graph`): A :class:`Graph` instance app (:class:`App`): A :class:`App` instance. Returns: An :class:`Operation` with configuration that instruct analytical engine how to build the app. """ config = { types_pb2.APP_ALGO: utils.s_to_attr(app.algo), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(graph.graph_type), types_pb2.OID_TYPE: utils.s_to_attr( utils.normalize_data_type_str(graph.schema.oid_type) ), types_pb2.VID_TYPE: utils.s_to_attr(graph.schema.vid_type), types_pb2.V_DATA_TYPE: utils.s_to_attr( utils.data_type_to_cpp(graph.schema.vdata_type) ), types_pb2.E_DATA_TYPE: utils.s_to_attr( utils.data_type_to_cpp(graph.schema.edata_type) ), } if app.gar is not None: config[types_pb2.GAR] = utils.bytes_to_attr(app.gar) opr = Operation( graph.session_id, types_pb2.CREATE_APP, config=config, output_types=types_pb2.APP, ) return opr def create_graph(session_id, graph_type, **kwargs): """Create an `CREATE_GRAPH` op, add op to default dag. Args: session_id (str): Refer to session that the graph will be create on. graph_type (:enum:`GraphType`): GraphType defined in proto.types.proto. **kwargs: additional properties respect to different `graph_type`. Returns: An op to create a graph in c++ side with necessary configurations. """ config = { types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(graph_type), } if graph_type == types_pb2.ARROW_PROPERTY: attrs = kwargs.pop("attrs", None) if attrs: for k, v in attrs.items(): if isinstance(v, attr_value_pb2.AttrValue): config[k] = v elif graph_type == types_pb2.DYNAMIC_PROPERTY: config[types_pb2.E_FILE] = utils.s_to_attr(kwargs["efile"]) config[types_pb2.V_FILE] = utils.s_to_attr(kwargs["vfile"]) config[types_pb2.DIRECTED] = utils.b_to_attr(kwargs["directed"]) config[types_pb2.DISTRIBUTED] = utils.b_to_attr(kwargs["distributed"]) else: raise RuntimeError("Not supported graph type {}".format(graph_type)) op = Operation( session_id, types_pb2.CREATE_GRAPH, config=config, output_types=types_pb2.GRAPH ) return op def add_labels_to_graph(graph, **kwargs): """Add new labels to existed graph. Args: graph (:class:`Graph`): A graph instance. May not be fully loaded. i.e. it's in a building procedure. Raises: NotImplementedError: When encountered not supported graph type. Returns: The operation. Notes: Since we don't want to trigger the loading, we must not use any api that can trigger the loading process implicitly. 
""" config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph._key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(graph._graph_type), } if graph._graph_type == types_pb2.ARROW_PROPERTY: attrs = kwargs.pop("attrs", None) if attrs: for k, v in attrs.items(): if isinstance(v, attr_value_pb2.AttrValue): config[k] = v else: raise NotImplementedError( f"Add vertices or edges is not supported yet on graph type {graph._graph_type}" ) op = Operation( graph._session.session_id, types_pb2.ADD_LABELS, config=config, output_types=types_pb2.GRAPH, ) return op def dynamic_to_arrow(graph): """Create an op to transform a :class:`nx.Graph` object to :class:`Graph`. Args: graph (:class:`Graph`): Source graph, which type should be DYNAMIC_PROPERTY Returns: An op of transform dynamic graph to arrow graph with necessary configurations. """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) oid_type = None for node in graph: if oid_type is None: oid_type = type(node) elif oid_type != type(node): raise RuntimeError( "The vertex type is not consistent {} vs {}, can not convert it to arrow graph".format( str(oid_type), str(type(node)) ) ) if oid_type == int or oid_type is None: oid_type = utils.data_type_to_cpp(types_pb2.INT64) elif oid_type == str: oid_type = utils.data_type_to_cpp(types_pb2.STRING) else: raise RuntimeError("Unsupported oid type: " + str(oid_type)) vid_type = utils.data_type_to_cpp(types_pb2.UINT64) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(types_pb2.ARROW_PROPERTY), types_pb2.DST_GRAPH_TYPE: utils.graph_type_to_attr(types_pb2.ARROW_PROPERTY), types_pb2.OID_TYPE: utils.s_to_attr(oid_type), types_pb2.VID_TYPE: utils.s_to_attr(vid_type), } op = Operation( graph.session_id, types_pb2.TRANSFORM_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def arrow_to_dynamic(graph): """Transform a :class:`Graph` object to :class:`nx.Graph`. Args: graph (:class:`Graph`): Source graph, which type should be ARROW_PROPERTY. Returns: An op of transform arrow graph to dynamic graph with necessary configurations. """ check_argument(graph.graph_type == types_pb2.ARROW_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(types_pb2.ARROW_PROPERTY), types_pb2.DST_GRAPH_TYPE: utils.graph_type_to_attr(types_pb2.DYNAMIC_PROPERTY), types_pb2.OID_TYPE: utils.s_to_attr(graph.schema.oid_type), types_pb2.VID_TYPE: utils.s_to_attr(graph.schema.vid_type), } op = Operation( graph.session_id, types_pb2.TRANSFORM_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def modify_edges(graph, modify_type, edges): """Create modify edges operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. modify_type (`type_pb2.(NX_ADD_EDGES | NX_DEL_EDGES | NX_UPDATE_EDGES)`): The modify type edges (list): List of edges to be inserted into or delete from graph based on `modify_type` Returns: An op to modify edges on the graph. """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = {} config[types_pb2.GRAPH_NAME] = utils.s_to_attr(graph.key) config[types_pb2.MODIFY_TYPE] = utils.modify_type_to_attr(modify_type) config[types_pb2.EDGES] = utils.list_str_to_attr(edges) op = Operation( graph.session_id, types_pb2.MODIFY_EDGES, config=config, output_types=types_pb2.GRAPH, ) return op def modify_vertices(graph, modify_type, vertices): """Create modify vertices operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. 
modify_type (`type_pb2.(NX_ADD_NODES | NX_DEL_NODES | NX_UPDATE_NODES)`): The modify type vertices (list): node list. Returns: An op to modify vertices on the graph. """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = {} config[types_pb2.GRAPH_NAME] = utils.s_to_attr(graph.key) config[types_pb2.MODIFY_TYPE] = utils.modify_type_to_attr(modify_type) config[types_pb2.NODES] = utils.list_str_to_attr(vertices) op = Operation( graph.session_id, types_pb2.MODIFY_VERTICES, config=config, output_types=types_pb2.GRAPH, ) return op def run_app(graph, app, *args, **kwargs): """Run `app` on the `graph`. Args: graph (:class:`Graph`): A loaded graph. app (:class:`App`): A loaded app that will be queried. key (str): Key of query results, can be used to retrieve results. *args: Additional query params that will be used in evaluation. **kwargs: Key-value formated query params that mostly used in Cython apps. Returns: An op to run app on the specified graph, with optional query parameters. """ config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.APP_NAME: utils.s_to_attr(app.key), } output_prefix = kwargs.pop("output_prefix", ".") config[types_pb2.OUTPUT_PREFIX] = utils.s_to_attr(output_prefix) # optional query arguments. params = utils.pack_query_params(*args, **kwargs) query_args = query_args_pb2.QueryArgs() query_args.args.extend(params) op = Operation( graph.session_id, types_pb2.RUN_APP, config=config, output_types=types_pb2.RESULTS, query_args=query_args, ) return op def report_graph( graph, report_type, node=None, edge=None, fid=None, lid=None, key=None ): """Create report operation for nx graph. This operation is used to simulate networkx graph reporting methods with variaty report type and corresponding config parameters. Args: graph (`nx.Graph`): A nx graph. report_type: report type, can be type_pb2.(NODE_NUM, EDGE_NUM, HAS_NODE, HAS_EDGE, NODE_DATA, EDGE_DATA, NEIGHBORS_BY_NODE, SUCCS_BY_NODE, PREDS_BY_NODE, NEIGHBORS_BY_LOC, SUCCS_BY_LOC, PREDS_BY_LOC, DEG_BY_NODE, IN_DEG_BY_NODE, OUT_DEG_BY_NODE, DEG_BY_LOC, IN_DEG_BY_LOC, OUT_DEG_BY_LOC, NODES_BY_LOC) node (str): node id, used as node id with 'NODE' report types. (optional) edge (str): an edge with 'EDGE' report types. (optional) fid (int): fragment id, with 'LOC' report types. (optional) lid (int): local id of node in grape_engine, with 'LOC; report types. (optional) key (str): edge key for MultiGraph or MultiDiGraph, with 'EDGE' report types. (optional) Returns: An op to do reporting job. 
""" config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.REPORT_TYPE: utils.report_type_to_attr(report_type), } if node is not None: config[types_pb2.NODE] = utils.s_to_attr(node) if edge is not None: config[types_pb2.EDGE] = utils.s_to_attr(edge) if fid is not None: config[types_pb2.FID] = utils.i_to_attr(fid) if lid is not None: config[types_pb2.LID] = utils.i_to_attr(lid) config[types_pb2.EDGE_KEY] = utils.s_to_attr(str(key) if key is not None else "") op = Operation( graph.session_id, types_pb2.REPORT_GRAPH, config=config, output_types=types_pb2.RESULTS, ) return op def project_arrow_property_graph(graph, vertex_collections, edge_collections): check_argument(graph.graph_type == types_pb2.ARROW_PROPERTY) attr = attr_value_pb2.AttrValue() v_attr = attr_value_pb2.NameAttrList() e_attr = attr_value_pb2.NameAttrList() for label, props in vertex_collections.items(): v_attr.attr[label].CopyFrom(utils.list_i_to_attr(props)) for label, props in edge_collections.items(): e_attr.attr[label].CopyFrom(utils.list_i_to_attr(props)) attr.list.func.extend([v_attr, e_attr]) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(graph.graph_type), types_pb2.ARROW_PROPERTY_DEFINITION: attr, } op = Operation( graph.session_id, types_pb2.PROJECT_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def project_arrow_property_graph_to_simple( graph, v_label_id, v_prop_id, e_label_id, e_prop_id, v_data_type, e_data_type, oid_type=None, vid_type=None, ): """Project arrow property graph to a simple graph. Args: graph (:class:`Graph`): Source graph, which type should be ARROW_PROPERTY dst_graph_key (str): The key of projected graph. v_label_id (int): Label id of vertex used to project. v_prop_id (int): Property id of vertex used to project. e_label_id (int): Label id of edge used to project. e_prop_id (int): Property id of edge used to project. Returns: An op to project `graph`, results in a simple ARROW_PROJECTED graph. """ check_argument(graph.graph_type == types_pb2.ARROW_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(types_pb2.ARROW_PROJECTED), types_pb2.V_LABEL_ID: utils.i_to_attr(v_label_id), types_pb2.V_PROP_ID: utils.i_to_attr(v_prop_id), types_pb2.E_LABEL_ID: utils.i_to_attr(e_label_id), types_pb2.E_PROP_ID: utils.i_to_attr(e_prop_id), types_pb2.OID_TYPE: utils.s_to_attr(oid_type), types_pb2.VID_TYPE: utils.s_to_attr(vid_type), types_pb2.V_DATA_TYPE: utils.s_to_attr(utils.data_type_to_cpp(v_data_type)), types_pb2.E_DATA_TYPE: utils.s_to_attr(utils.data_type_to_cpp(e_data_type)), } op = Operation( graph.session_id, types_pb2.PROJECT_TO_SIMPLE, config=config, output_types=types_pb2.GRAPH, ) return op def project_dynamic_property_graph(graph, v_prop, e_prop, v_prop_type, e_prop_type): """Create project graph operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. v_prop (str): The node attribute key to project. e_prop (str): The edge attribute key to project. v_prop_type (str): Type of the node attribute. e_prop_type (str): Type of the edge attribute. Returns: Operation to project a dynamic property graph. Results in a simple graph. 
""" check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(types_pb2.DYNAMIC_PROJECTED), types_pb2.V_PROP_KEY: utils.s_to_attr(v_prop), types_pb2.E_PROP_KEY: utils.s_to_attr(e_prop), types_pb2.V_DATA_TYPE: utils.s_to_attr(utils.data_type_to_cpp(v_prop_type)), types_pb2.E_DATA_TYPE: utils.s_to_attr(utils.data_type_to_cpp(e_prop_type)), } op = Operation( graph.session_id, types_pb2.PROJECT_TO_SIMPLE, config=config, output_types=types_pb2.GRAPH, ) return op def copy_graph(graph, copy_type="identical"): """Create copy operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. copy_type (str): 'identical': copy graph to destination graph without any change. 'reverse': copy graph to destination graph with reversing the graph edges Returns: Operation """ check_argument( graph.graph_type in (types_pb2.ARROW_PROPERTY, types_pb2.DYNAMIC_PROPERTY) ) check_argument(copy_type in ("identical", "reverse")) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.COPY_TYPE: utils.s_to_attr(copy_type), } op = Operation( graph.session_id, types_pb2.COPY_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def to_directed(graph): """Create to_directed operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. Returns: Operation """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } op = Operation( graph.session_id, types_pb2.TO_DIRECTED, config=config, output_types=types_pb2.GRAPH, ) return op def to_undirected(graph): """Create to_undirected operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. Returns: Operation """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } op = Operation( graph.session_id, types_pb2.TO_UNDIRECTED, config=config, output_types=types_pb2.GRAPH, ) return op def create_graph_view(graph, view_type): """Create view of nx graph. Args: graph (:class:`nx.Graph`): A nx graph. view_type (str): 'reversed': get a reverse view of graph. 'directed': get a directed view of graph 'undirected': get a undirected view of graph Returns: Operation """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) check_argument(view_type in ("reversed", "directed", "undirected")) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.VIEW_TYPE: utils.s_to_attr(view_type), } op = Operation( graph.session_id, types_pb2.VIEW_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def clear_graph(graph): """Create clear graph operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. Returns: An op to modify edges on the graph. """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } op = Operation( graph.session_id, types_pb2.CLEAR_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def clear_edges(graph): """Create clear edges operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. Returns: An op to modify edges on the graph. 
""" check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } op = Operation( graph.session_id, types_pb2.CLEAR_EDGES, config=config, output_types=types_pb2.GRAPH, ) return op def create_subgraph(graph, nodes=None, edges=None): """Create subgraph operation for nx graph. Args: graph (:class:`nx.Graph`): A nx graph. nodes (list): the nodes to induce a subgraph. edges (list): the edges to induce a edge-induced subgraph. Returns: Operation """ check_argument(graph.graph_type == types_pb2.DYNAMIC_PROPERTY) config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } if nodes is not None: config[types_pb2.NODES] = utils.list_str_to_attr(nodes) if edges is not None: config[types_pb2.EDGES] = utils.list_str_to_attr(edges) op = Operation( graph.session_id, types_pb2.INDUCE_SUBGRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def unload_app(app): """Unload a loaded app. Args: app (:class:`App`): The app to unload. Returns: An op to unload the `app`. """ config = {types_pb2.APP_NAME: utils.s_to_attr(app.key)} op = Operation( app._session_id, types_pb2.UNLOAD_APP, config=config, output_types=types_pb2.APP ) return op def unload_graph(graph): """Unload a graph. Args: graph (:class:`Graph`): The graph to unload. Returns: An op to unload the `graph`. """ config = {types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key)} # Dynamic graph doesn't have a vineyard id if hasattr(graph, "vineyard_id"): config[types_pb2.VINEYARD_ID] = utils.i_to_attr(graph.vineyard_id) op = Operation( graph.session_id, types_pb2.UNLOAD_GRAPH, config=config, output_types=types_pb2.GRAPH, ) return op def context_to_numpy(results, selector=None, vertex_range=None, axis=0): """Retrieve results as a numpy ndarray. Args: results (:class:`Context`): Results return by `run_app` operation, store the query results. selector (str): Select the type of data to retrieve. vertex_range (str): Specify a range to retrieve. Returns: An op to retrieve query results and convert to numpy ndarray. """ config = { types_pb2.CTX_NAME: utils.s_to_attr(results.key), } if selector is not None: config[types_pb2.SELECTOR] = utils.s_to_attr(selector) if vertex_range is not None: config[types_pb2.VERTEX_RANGE] = utils.s_to_attr(vertex_range) if axis is not None: config[types_pb2.AXIS] = utils.i_to_attr(axis) op = Operation( results._session_id, types_pb2.CONTEXT_TO_NUMPY, config=config, output_types=types_pb2.TENSOR, ) return op def context_to_dataframe(results, selector=None, vertex_range=None): """Retrieve results as a pandas DataFrame. Args: results (:class:`Context`): Results return by `run_app` operation, store the query results. selector (str): Select the type of data to retrieve. vertex_range (str): Specify a range to retrieve. Returns: An op to retrieve query results and convert to pandas DataFrame. """ config = { types_pb2.CTX_NAME: utils.s_to_attr(results.key), } if selector is not None: config[types_pb2.SELECTOR] = utils.s_to_attr(selector) if vertex_range is not None: config[types_pb2.VERTEX_RANGE] = utils.s_to_attr(vertex_range) op = Operation( results._session_id, types_pb2.CONTEXT_TO_DATAFRAME, config=config, output_types=types_pb2.DATAFRAME, ) return op def to_vineyard_tensor(results, selector=None, vertex_range=None, axis=None): """Retrieve results as vineyard tensor. Parameters: results (:class:`Context`): Results return by `run_app` operation, store the query results. selector (str): Select the type of data to retrieve. 
vertex_range (str): Specify a range to retrieve. Returns: An op to convert query results into a vineyard tensor. """ config = { types_pb2.CTX_NAME: utils.s_to_attr(results.key), } if selector is not None: config[types_pb2.SELECTOR] = utils.s_to_attr(selector) if vertex_range is not None: config[types_pb2.VERTEX_RANGE] = utils.s_to_attr(vertex_range) if axis is not None: config[types_pb2.AXIS] = utils.i_to_attr(axis) op = Operation( results._session_id, types_pb2.TO_VINEYARD_TENSOR, config=config, output_types=types_pb2.VINEYARD_TENSOR, ) return op def to_vineyard_dataframe(results, selector=None, vertex_range=None): """Retrieve results as vineyard dataframe. Parameters: results (:class:`Context`): Results return by `run_app` operation, store the query results. selector (str): Select the type of data to retrieve. vertex_range (str): Specify a range to retrieve. Returns: An op to convert query results into a vineyard dataframe. """ config = { types_pb2.CTX_NAME: utils.s_to_attr(results.key), } if selector is not None: config[types_pb2.SELECTOR] = utils.s_to_attr(selector) if vertex_range is not None: config[types_pb2.VERTEX_RANGE] = utils.s_to_attr(vertex_range) op = Operation( results._session_id, types_pb2.TO_VINEYARD_DATAFRAME, config=config, output_types=types_pb2.VINEYARD_DATAFRAME, ) return op def add_column(graph, results, selector): """Add a column to `graph`, produce a new graph. Args: graph (:class:`Graph`): Source ArrowProperty graph. results (:class:`Context`): Results that generated by previous app querying. selector (str): Used to select a subrange of data of results, add them as one column of graph. Returns: A new graph with new columns added. """ config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), types_pb2.GRAPH_TYPE: utils.graph_type_to_attr(graph.graph_type), types_pb2.CTX_NAME: utils.s_to_attr(results.key), types_pb2.SELECTOR: utils.s_to_attr(selector), } op = Operation( graph.session_id, types_pb2.ADD_COLUMN, config=config, output_types=types_pb2.GRAPH, ) return op def graph_to_numpy(graph, selector=None, vertex_range=None): """Retrieve graph raw data as a numpy ndarray. Args: graph (:class:`Graph`): Source graph. selector (str): Select the type of data to retrieve. vertex_range (str): Specify a range to retrieve. Returns: An op to convert a graph's data to numpy ndarray. """ config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } if selector is not None: config[types_pb2.SELECTOR] = utils.s_to_attr(selector) if vertex_range is not None: config[types_pb2.VERTEX_RANGE] = utils.s_to_attr(vertex_range) op = Operation( graph.session_id, types_pb2.GRAPH_TO_NUMPY, config=config, output_types=types_pb2.TENSOR, ) return op def graph_to_dataframe(graph, selector=None, vertex_range=None): """Retrieve graph raw data as a pandas DataFrame. Args: graph (:class:`Graph`): Source graph. selector (str): Select the type of data to retrieve. vertex_range (str): Specify a range to retrieve. Returns: An op to convert a graph's data to pandas DataFrame. """ config = { types_pb2.GRAPH_NAME: utils.s_to_attr(graph.key), } if selector is not None: config[types_pb2.SELECTOR] = utils.s_to_attr(selector) if vertex_range is not None: config[types_pb2.VERTEX_RANGE] = utils.s_to_attr(vertex_range) op = Operation( graph.session_id, types_pb2.GRAPH_TO_DATAFRAME, config=config, output_types=types_pb2.DATAFRAME, ) return op
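A brief sketch of how a higher layer might drive the wrappers defined in this module; `g`, `app`, and `ctx` stand for already-constructed Graph, App, and Context handles, and the query parameters and selector string are placeholders, not values from the source.

def build_query_ops(g, app, ctx):
    # Positional/keyword params (minus output_prefix) are packed by
    # utils.pack_query_params into the QueryArgs attached to the RUN_APP op.
    query_op = run_app(g, app, 4, weight="dist", output_prefix="/tmp/out")
    # Results held by a Context can later be pulled back as a numpy ndarray;
    # "r" is only a placeholder selector here.
    fetch_op = context_to_numpy(ctx, selector="r")
    return query_op, fetch_op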
[ "graphscope.proto.query_args_pb2.QueryArgs", "graphscope.proto.attr_value_pb2.NameAttrList", "graphscope.framework.utils.normalize_data_type_str", "graphscope.framework.utils.report_type_to_attr", "graphscope.framework.utils.s_to_attr", "graphscope.framework.utils.bytes_to_attr", "graphscope.framework.utils.pack_query_params", "graphscope.proto.attr_value_pb2.AttrValue", "graphscope.framework.utils.i_to_attr", "graphscope.framework.utils.b_to_attr", "graphscope.framework.utils.graph_type_to_attr", "graphscope.framework.utils.modify_type_to_attr", "graphscope.framework.utils.data_type_to_cpp", "graphscope.framework.utils.list_i_to_attr", "graphscope.framework.utils.list_str_to_attr", "graphscope.framework.operation.Operation", "graphscope.framework.errors.check_argument" ]
[((2048, 2144), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.CREATE_APP'], {'config': 'config', 'output_types': 'types_pb2.APP'}), '(graph.session_id, types_pb2.CREATE_APP, config=config,\n output_types=types_pb2.APP)\n', (2057, 2144), False, 'from graphscope.framework.operation import Operation\n'), ((3414, 3509), 'graphscope.framework.operation.Operation', 'Operation', (['session_id', 'types_pb2.CREATE_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(session_id, types_pb2.CREATE_GRAPH, config=config, output_types=\n types_pb2.GRAPH)\n', (3423, 3509), False, 'from graphscope.framework.operation import Operation\n'), ((4596, 4703), 'graphscope.framework.operation.Operation', 'Operation', (['graph._session.session_id', 'types_pb2.ADD_LABELS'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph._session.session_id, types_pb2.ADD_LABELS, config=config,\n output_types=types_pb2.GRAPH)\n', (4605, 4703), False, 'from graphscope.framework.operation import Operation\n'), ((5063, 5125), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (5077, 5125), False, 'from graphscope.framework.errors import check_argument\n'), ((5771, 5811), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['types_pb2.UINT64'], {}), '(types_pb2.UINT64)\n', (5793, 5811), False, 'from graphscope.framework import utils\n'), ((6179, 6282), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.TRANSFORM_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.TRANSFORM_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (6188, 6282), False, 'from graphscope.framework.operation import Operation\n'), ((6633, 6693), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(graph.graph_type == types_pb2.ARROW_PROPERTY)\n', (6647, 6693), False, 'from graphscope.framework.errors import check_argument\n'), ((7088, 7191), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.TRANSFORM_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.TRANSFORM_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (7097, 7191), False, 'from graphscope.framework.operation import Operation\n'), ((7665, 7727), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (7679, 7727), False, 'from graphscope.framework.errors import check_argument\n'), ((7779, 7805), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (7794, 7805), False, 'from graphscope.framework import utils\n'), ((7842, 7880), 'graphscope.framework.utils.modify_type_to_attr', 'utils.modify_type_to_attr', (['modify_type'], {}), '(modify_type)\n', (7867, 7880), False, 'from graphscope.framework import utils\n'), ((7911, 7940), 'graphscope.framework.utils.list_str_to_attr', 'utils.list_str_to_attr', (['edges'], {}), '(edges)\n', (7933, 7940), False, 'from graphscope.framework import utils\n'), ((7950, 8050), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.MODIFY_EDGES'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), 
'(graph.session_id, types_pb2.MODIFY_EDGES, config=config,\n output_types=types_pb2.GRAPH)\n', (7959, 8050), False, 'from graphscope.framework.operation import Operation\n'), ((8472, 8534), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (8486, 8534), False, 'from graphscope.framework.errors import check_argument\n'), ((8586, 8612), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (8601, 8612), False, 'from graphscope.framework import utils\n'), ((8649, 8687), 'graphscope.framework.utils.modify_type_to_attr', 'utils.modify_type_to_attr', (['modify_type'], {}), '(modify_type)\n', (8674, 8687), False, 'from graphscope.framework import utils\n'), ((8718, 8750), 'graphscope.framework.utils.list_str_to_attr', 'utils.list_str_to_attr', (['vertices'], {}), '(vertices)\n', (8740, 8750), False, 'from graphscope.framework import utils\n'), ((8760, 8863), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.MODIFY_VERTICES'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.MODIFY_VERTICES, config=config,\n output_types=types_pb2.GRAPH)\n', (8769, 8863), False, 'from graphscope.framework.operation import Operation\n'), ((9668, 9698), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['output_prefix'], {}), '(output_prefix)\n', (9683, 9698), False, 'from graphscope.framework import utils\n'), ((9744, 9784), 'graphscope.framework.utils.pack_query_params', 'utils.pack_query_params', (['*args'], {}), '(*args, **kwargs)\n', (9767, 9784), False, 'from graphscope.framework import utils\n'), ((9802, 9828), 'graphscope.proto.query_args_pb2.QueryArgs', 'query_args_pb2.QueryArgs', ([], {}), '()\n', (9826, 9828), False, 'from graphscope.proto import query_args_pb2\n'), ((9874, 9995), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.RUN_APP'], {'config': 'config', 'output_types': 'types_pb2.RESULTS', 'query_args': 'query_args'}), '(graph.session_id, types_pb2.RUN_APP, config=config, output_types=\n types_pb2.RESULTS, query_args=query_args)\n', (9883, 9995), False, 'from graphscope.framework.operation import Operation\n'), ((12122, 12224), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.REPORT_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.RESULTS'}), '(graph.session_id, types_pb2.REPORT_GRAPH, config=config,\n output_types=types_pb2.RESULTS)\n', (12131, 12224), False, 'from graphscope.framework.operation import Operation\n'), ((12359, 12419), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(graph.graph_type == types_pb2.ARROW_PROPERTY)\n', (12373, 12419), False, 'from graphscope.framework.errors import check_argument\n'), ((12431, 12457), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {}), '()\n', (12455, 12457), False, 'from graphscope.proto import attr_value_pb2\n'), ((12471, 12500), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (12498, 12500), False, 'from graphscope.proto import attr_value_pb2\n'), ((12514, 12543), 'graphscope.proto.attr_value_pb2.NameAttrList', 'attr_value_pb2.NameAttrList', ([], {}), '()\n', (12541, 12543), False, 'from graphscope.proto import attr_value_pb2\n'), ((13034, 13135), 
'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.PROJECT_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.PROJECT_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (13043, 13135), False, 'from graphscope.framework.operation import Operation\n'), ((13935, 13995), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.ARROW_PROPERTY)'], {}), '(graph.graph_type == types_pb2.ARROW_PROPERTY)\n', (13949, 13995), False, 'from graphscope.framework.errors import check_argument\n'), ((14679, 14784), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.PROJECT_TO_SIMPLE'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.PROJECT_TO_SIMPLE, config=config,\n output_types=types_pb2.GRAPH)\n', (14688, 14784), False, 'from graphscope.framework.operation import Operation\n'), ((15363, 15425), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (15377, 15425), False, 'from graphscope.framework.errors import check_argument\n'), ((15880, 15985), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.PROJECT_TO_SIMPLE'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.PROJECT_TO_SIMPLE, config=config,\n output_types=types_pb2.GRAPH)\n', (15889, 15985), False, 'from graphscope.framework.operation import Operation\n'), ((16417, 16512), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type in (types_pb2.ARROW_PROPERTY, types_pb2.DYNAMIC_PROPERTY))'], {}), '(graph.graph_type in (types_pb2.ARROW_PROPERTY, types_pb2.\n DYNAMIC_PROPERTY))\n', (16431, 16512), False, 'from graphscope.framework.errors import check_argument\n'), ((16526, 16579), 'graphscope.framework.errors.check_argument', 'check_argument', (["(copy_type in ('identical', 'reverse'))"], {}), "(copy_type in ('identical', 'reverse'))\n", (16540, 16579), False, 'from graphscope.framework.errors import check_argument\n'), ((16726, 16824), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.COPY_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.COPY_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (16735, 16824), False, 'from graphscope.framework.operation import Operation\n'), ((17052, 17114), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (17066, 17114), False, 'from graphscope.framework.errors import check_argument\n'), ((17204, 17303), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.TO_DIRECTED'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.TO_DIRECTED, config=config,\n output_types=types_pb2.GRAPH)\n', (17213, 17303), False, 'from graphscope.framework.operation import Operation\n'), ((17535, 17597), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (17549, 17597), False, 'from graphscope.framework.errors import check_argument\n'), ((17687, 17788), 
'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.TO_UNDIRECTED'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.TO_UNDIRECTED, config=config,\n output_types=types_pb2.GRAPH)\n', (17696, 17788), False, 'from graphscope.framework.operation import Operation\n'), ((18215, 18277), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (18229, 18277), False, 'from graphscope.framework.errors import check_argument\n'), ((18282, 18349), 'graphscope.framework.errors.check_argument', 'check_argument', (["(view_type in ('reversed', 'directed', 'undirected'))"], {}), "(view_type in ('reversed', 'directed', 'undirected'))\n", (18296, 18349), False, 'from graphscope.framework.errors import check_argument\n'), ((18496, 18594), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.VIEW_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.VIEW_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (18505, 18594), False, 'from graphscope.framework.operation import Operation\n'), ((18848, 18910), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (18862, 18910), False, 'from graphscope.framework.errors import check_argument\n'), ((18999, 19098), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.CLEAR_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.CLEAR_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (19008, 19098), False, 'from graphscope.framework.operation import Operation\n'), ((19352, 19414), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (19366, 19414), False, 'from graphscope.framework.errors import check_argument\n'), ((19503, 19602), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.CLEAR_EDGES'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.CLEAR_EDGES, config=config,\n output_types=types_pb2.GRAPH)\n', (19512, 19602), False, 'from graphscope.framework.operation import Operation\n'), ((19976, 20038), 'graphscope.framework.errors.check_argument', 'check_argument', (['(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)'], {}), '(graph.graph_type == types_pb2.DYNAMIC_PROPERTY)\n', (19990, 20038), False, 'from graphscope.framework.errors import check_argument\n'), ((20308, 20411), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.INDUCE_SUBGRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.INDUCE_SUBGRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (20317, 20411), False, 'from graphscope.framework.operation import Operation\n'), ((20696, 20791), 'graphscope.framework.operation.Operation', 'Operation', (['app._session_id', 'types_pb2.UNLOAD_APP'], {'config': 'config', 'output_types': 'types_pb2.APP'}), '(app._session_id, types_pb2.UNLOAD_APP, config=config,\n output_types=types_pb2.APP)\n', (20705, 20791), False, 'from graphscope.framework.operation import Operation\n'), ((21222, 21322), 
'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.UNLOAD_GRAPH'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.UNLOAD_GRAPH, config=config,\n output_types=types_pb2.GRAPH)\n', (21231, 21322), False, 'from graphscope.framework.operation import Operation\n'), ((22176, 22284), 'graphscope.framework.operation.Operation', 'Operation', (['results._session_id', 'types_pb2.CONTEXT_TO_NUMPY'], {'config': 'config', 'output_types': 'types_pb2.TENSOR'}), '(results._session_id, types_pb2.CONTEXT_TO_NUMPY, config=config,\n output_types=types_pb2.TENSOR)\n', (22185, 22284), False, 'from graphscope.framework.operation import Operation\n'), ((23060, 23176), 'graphscope.framework.operation.Operation', 'Operation', (['results._session_id', 'types_pb2.CONTEXT_TO_DATAFRAME'], {'config': 'config', 'output_types': 'types_pb2.DATAFRAME'}), '(results._session_id, types_pb2.CONTEXT_TO_DATAFRAME, config=\n config, output_types=types_pb2.DATAFRAME)\n', (23069, 23176), False, 'from graphscope.framework.operation import Operation\n'), ((24032, 24151), 'graphscope.framework.operation.Operation', 'Operation', (['results._session_id', 'types_pb2.TO_VINEYARD_TENSOR'], {'config': 'config', 'output_types': 'types_pb2.VINEYARD_TENSOR'}), '(results._session_id, types_pb2.TO_VINEYARD_TENSOR, config=config,\n output_types=types_pb2.VINEYARD_TENSOR)\n', (24041, 24151), False, 'from graphscope.framework.operation import Operation\n'), ((24926, 25052), 'graphscope.framework.operation.Operation', 'Operation', (['results._session_id', 'types_pb2.TO_VINEYARD_DATAFRAME'], {'config': 'config', 'output_types': 'types_pb2.VINEYARD_DATAFRAME'}), '(results._session_id, types_pb2.TO_VINEYARD_DATAFRAME, config=\n config, output_types=types_pb2.VINEYARD_DATAFRAME)\n', (24935, 25052), False, 'from graphscope.framework.operation import Operation\n'), ((25810, 25908), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.ADD_COLUMN'], {'config': 'config', 'output_types': 'types_pb2.GRAPH'}), '(graph.session_id, types_pb2.ADD_COLUMN, config=config,\n output_types=types_pb2.GRAPH)\n', (25819, 25908), False, 'from graphscope.framework.operation import Operation\n'), ((26611, 26714), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.GRAPH_TO_NUMPY'], {'config': 'config', 'output_types': 'types_pb2.TENSOR'}), '(graph.session_id, types_pb2.GRAPH_TO_NUMPY, config=config,\n output_types=types_pb2.TENSOR)\n', (26620, 26714), False, 'from graphscope.framework.operation import Operation\n'), ((27427, 27537), 'graphscope.framework.operation.Operation', 'Operation', (['graph.session_id', 'types_pb2.GRAPH_TO_DATAFRAME'], {'config': 'config', 'output_types': 'types_pb2.DATAFRAME'}), '(graph.session_id, types_pb2.GRAPH_TO_DATAFRAME, config=config,\n output_types=types_pb2.DATAFRAME)\n', (27436, 27537), False, 'from graphscope.framework.operation import Operation\n'), ((1414, 1439), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['app.algo'], {}), '(app.algo)\n', (1429, 1439), False, 'from graphscope.framework import utils\n'), ((1471, 1513), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['graph.graph_type'], {}), '(graph.graph_type)\n', (1495, 1513), False, 'from graphscope.framework import utils\n'), ((1664, 1702), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.schema.vid_type'], {}), '(graph.schema.vid_type)\n', (1679, 1702), False, 'from 
graphscope.framework import utils\n'), ((2008, 2036), 'graphscope.framework.utils.bytes_to_attr', 'utils.bytes_to_attr', (['app.gar'], {}), '(app.gar)\n', (2027, 2036), False, 'from graphscope.framework import utils\n'), ((2693, 2729), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['graph_type'], {}), '(graph_type)\n', (2717, 2729), False, 'from graphscope.framework import utils\n'), ((4082, 4109), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph._key'], {}), '(graph._key)\n', (4097, 4109), False, 'from graphscope.framework import utils\n'), ((4141, 4184), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['graph._graph_type'], {}), '(graph._graph_type)\n', (4165, 4184), False, 'from graphscope.framework import utils\n'), ((5551, 5590), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['types_pb2.INT64'], {}), '(types_pb2.INT64)\n', (5573, 5590), False, 'from graphscope.framework import utils\n'), ((5857, 5883), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (5872, 5883), False, 'from graphscope.framework import utils\n'), ((5915, 5965), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['types_pb2.ARROW_PROPERTY'], {}), '(types_pb2.ARROW_PROPERTY)\n', (5939, 5965), False, 'from graphscope.framework import utils\n'), ((6001, 6051), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['types_pb2.ARROW_PROPERTY'], {}), '(types_pb2.ARROW_PROPERTY)\n', (6025, 6051), False, 'from graphscope.framework import utils\n'), ((6081, 6106), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['oid_type'], {}), '(oid_type)\n', (6096, 6106), False, 'from graphscope.framework import utils\n'), ((6136, 6161), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vid_type'], {}), '(vid_type)\n', (6151, 6161), False, 'from graphscope.framework import utils\n'), ((6739, 6765), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (6754, 6765), False, 'from graphscope.framework import utils\n'), ((6797, 6847), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['types_pb2.ARROW_PROPERTY'], {}), '(types_pb2.ARROW_PROPERTY)\n', (6821, 6847), False, 'from graphscope.framework import utils\n'), ((6883, 6935), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['types_pb2.DYNAMIC_PROPERTY'], {}), '(types_pb2.DYNAMIC_PROPERTY)\n', (6907, 6935), False, 'from graphscope.framework import utils\n'), ((6965, 7003), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.schema.oid_type'], {}), '(graph.schema.oid_type)\n', (6980, 7003), False, 'from graphscope.framework import utils\n'), ((7033, 7071), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.schema.vid_type'], {}), '(graph.schema.vid_type)\n', (7048, 7071), False, 'from graphscope.framework import utils\n'), ((9489, 9515), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (9504, 9515), False, 'from graphscope.framework import utils\n'), ((9545, 9569), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['app.key'], {}), '(app.key)\n', (9560, 9569), False, 'from graphscope.framework import utils\n'), ((11607, 11633), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (11622, 11633), False, 'from graphscope.framework 
import utils\n'), ((11666, 11704), 'graphscope.framework.utils.report_type_to_attr', 'utils.report_type_to_attr', (['report_type'], {}), '(report_type)\n', (11691, 11704), False, 'from graphscope.framework import utils\n'), ((11770, 11791), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['node'], {}), '(node)\n', (11785, 11791), False, 'from graphscope.framework import utils\n'), ((11850, 11871), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['edge'], {}), '(edge)\n', (11865, 11871), False, 'from graphscope.framework import utils\n'), ((11928, 11948), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['fid'], {}), '(fid)\n', (11943, 11948), False, 'from graphscope.framework import utils\n'), ((12005, 12025), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['lid'], {}), '(lid)\n', (12020, 12025), False, 'from graphscope.framework import utils\n'), ((12866, 12892), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (12881, 12892), False, 'from graphscope.framework import utils\n'), ((12924, 12966), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['graph.graph_type'], {}), '(graph.graph_type)\n', (12948, 12966), False, 'from graphscope.framework import utils\n'), ((14041, 14067), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (14056, 14067), False, 'from graphscope.framework import utils\n'), ((14099, 14150), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['types_pb2.ARROW_PROJECTED'], {}), '(types_pb2.ARROW_PROJECTED)\n', (14123, 14150), False, 'from graphscope.framework import utils\n'), ((14182, 14209), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['v_label_id'], {}), '(v_label_id)\n', (14197, 14209), False, 'from graphscope.framework import utils\n'), ((14240, 14266), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['v_prop_id'], {}), '(v_prop_id)\n', (14255, 14266), False, 'from graphscope.framework import utils\n'), ((14298, 14325), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['e_label_id'], {}), '(e_label_id)\n', (14313, 14325), False, 'from graphscope.framework import utils\n'), ((14356, 14382), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['e_prop_id'], {}), '(e_prop_id)\n', (14371, 14382), False, 'from graphscope.framework import utils\n'), ((14412, 14437), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['oid_type'], {}), '(oid_type)\n', (14427, 14437), False, 'from graphscope.framework import utils\n'), ((14467, 14492), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vid_type'], {}), '(vid_type)\n', (14482, 14492), False, 'from graphscope.framework import utils\n'), ((15471, 15497), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (15486, 15497), False, 'from graphscope.framework import utils\n'), ((15529, 15582), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['types_pb2.DYNAMIC_PROJECTED'], {}), '(types_pb2.DYNAMIC_PROJECTED)\n', (15553, 15582), False, 'from graphscope.framework import utils\n'), ((15614, 15637), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['v_prop'], {}), '(v_prop)\n', (15629, 15637), False, 'from graphscope.framework import utils\n'), ((15669, 15692), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['e_prop'], {}), '(e_prop)\n', (15684, 15692), False, 'from 
graphscope.framework import utils\n'), ((16625, 16651), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (16640, 16651), False, 'from graphscope.framework import utils\n'), ((16682, 16708), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['copy_type'], {}), '(copy_type)\n', (16697, 16708), False, 'from graphscope.framework import utils\n'), ((17160, 17186), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (17175, 17186), False, 'from graphscope.framework import utils\n'), ((17643, 17669), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (17658, 17669), False, 'from graphscope.framework import utils\n'), ((18395, 18421), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (18410, 18421), False, 'from graphscope.framework import utils\n'), ((18452, 18478), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['view_type'], {}), '(view_type)\n', (18467, 18478), False, 'from graphscope.framework import utils\n'), ((18956, 18982), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (18971, 18982), False, 'from graphscope.framework import utils\n'), ((19460, 19486), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (19475, 19486), False, 'from graphscope.framework import utils\n'), ((20084, 20110), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (20099, 20110), False, 'from graphscope.framework import utils\n'), ((20178, 20207), 'graphscope.framework.utils.list_str_to_attr', 'utils.list_str_to_attr', (['nodes'], {}), '(nodes)\n', (20200, 20207), False, 'from graphscope.framework import utils\n'), ((20268, 20297), 'graphscope.framework.utils.list_str_to_attr', 'utils.list_str_to_attr', (['edges'], {}), '(edges)\n', (20290, 20297), False, 'from graphscope.framework import utils\n'), ((20661, 20685), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['app.key'], {}), '(app.key)\n', (20676, 20685), False, 'from graphscope.framework import utils\n'), ((21025, 21051), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (21040, 21051), False, 'from graphscope.framework import utils\n'), ((21178, 21212), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['graph.vineyard_id'], {}), '(graph.vineyard_id)\n', (21193, 21212), False, 'from graphscope.framework import utils\n'), ((21855, 21883), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['results.key'], {}), '(results.key)\n', (21870, 21883), False, 'from graphscope.framework import utils\n'), ((21957, 21982), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (21972, 21982), False, 'from graphscope.framework import utils\n'), ((22057, 22086), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex_range'], {}), '(vertex_range)\n', (22072, 22086), False, 'from graphscope.framework import utils\n'), ((22145, 22166), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['axis'], {}), '(axis)\n', (22160, 22166), False, 'from graphscope.framework import utils\n'), ((22819, 22847), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['results.key'], {}), '(results.key)\n', (22834, 22847), False, 'from graphscope.framework import utils\n'), ((22921, 22946), 
'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (22936, 22946), False, 'from graphscope.framework import utils\n'), ((23021, 23050), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex_range'], {}), '(vertex_range)\n', (23036, 23050), False, 'from graphscope.framework import utils\n'), ((23711, 23739), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['results.key'], {}), '(results.key)\n', (23726, 23739), False, 'from graphscope.framework import utils\n'), ((23813, 23838), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (23828, 23838), False, 'from graphscope.framework import utils\n'), ((23913, 23942), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex_range'], {}), '(vertex_range)\n', (23928, 23942), False, 'from graphscope.framework import utils\n'), ((24001, 24022), 'graphscope.framework.utils.i_to_attr', 'utils.i_to_attr', (['axis'], {}), '(axis)\n', (24016, 24022), False, 'from graphscope.framework import utils\n'), ((24685, 24713), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['results.key'], {}), '(results.key)\n', (24700, 24713), False, 'from graphscope.framework import utils\n'), ((24787, 24812), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (24802, 24812), False, 'from graphscope.framework import utils\n'), ((24887, 24916), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex_range'], {}), '(vertex_range)\n', (24902, 24916), False, 'from graphscope.framework import utils\n'), ((25580, 25606), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (25595, 25606), False, 'from graphscope.framework import utils\n'), ((25638, 25680), 'graphscope.framework.utils.graph_type_to_attr', 'utils.graph_type_to_attr', (['graph.graph_type'], {}), '(graph.graph_type)\n', (25662, 25680), False, 'from graphscope.framework import utils\n'), ((25710, 25738), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['results.key'], {}), '(results.key)\n', (25725, 25738), False, 'from graphscope.framework import utils\n'), ((25768, 25793), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (25783, 25793), False, 'from graphscope.framework import utils\n'), ((26372, 26398), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (26387, 26398), False, 'from graphscope.framework import utils\n'), ((26472, 26497), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (26487, 26497), False, 'from graphscope.framework import utils\n'), ((26572, 26601), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex_range'], {}), '(vertex_range)\n', (26587, 26601), False, 'from graphscope.framework import utils\n'), ((27188, 27214), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['graph.key'], {}), '(graph.key)\n', (27203, 27214), False, 'from graphscope.framework import utils\n'), ((27288, 27313), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['selector'], {}), '(selector)\n', (27303, 27313), False, 'from graphscope.framework import utils\n'), ((27388, 27417), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['vertex_range'], {}), '(vertex_range)\n', (27403, 27417), False, 'from graphscope.framework import utils\n'), ((1572, 1624), 
'graphscope.framework.utils.normalize_data_type_str', 'utils.normalize_data_type_str', (['graph.schema.oid_type'], {}), '(graph.schema.oid_type)\n', (1601, 1624), False, 'from graphscope.framework import utils\n'), ((1764, 1811), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['graph.schema.vdata_type'], {}), '(graph.schema.vdata_type)\n', (1786, 1811), False, 'from graphscope.framework import utils\n'), ((1883, 1930), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['graph.schema.edata_type'], {}), '(graph.schema.edata_type)\n', (1905, 1930), False, 'from graphscope.framework import utils\n'), ((3064, 3096), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (["kwargs['efile']"], {}), "(kwargs['efile'])\n", (3079, 3096), False, 'from graphscope.framework import utils\n'), ((3132, 3164), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (["kwargs['vfile']"], {}), "(kwargs['vfile'])\n", (3147, 3164), False, 'from graphscope.framework import utils\n'), ((3202, 3237), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (["kwargs['directed']"], {}), "(kwargs['directed'])\n", (3217, 3237), False, 'from graphscope.framework import utils\n'), ((3278, 3316), 'graphscope.framework.utils.b_to_attr', 'utils.b_to_attr', (["kwargs['distributed']"], {}), "(kwargs['distributed'])\n", (3293, 3316), False, 'from graphscope.framework import utils\n'), ((5636, 5676), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['types_pb2.STRING'], {}), '(types_pb2.STRING)\n', (5658, 5676), False, 'from graphscope.framework import utils\n'), ((12632, 12659), 'graphscope.framework.utils.list_i_to_attr', 'utils.list_i_to_attr', (['props'], {}), '(props)\n', (12652, 12659), False, 'from graphscope.framework import utils\n'), ((12747, 12774), 'graphscope.framework.utils.list_i_to_attr', 'utils.list_i_to_attr', (['props'], {}), '(props)\n', (12767, 12774), False, 'from graphscope.framework import utils\n'), ((14541, 14576), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['v_data_type'], {}), '(v_data_type)\n', (14563, 14576), False, 'from graphscope.framework import utils\n'), ((14626, 14661), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['e_data_type'], {}), '(e_data_type)\n', (14648, 14661), False, 'from graphscope.framework import utils\n'), ((15741, 15776), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['v_prop_type'], {}), '(v_prop_type)\n', (15763, 15776), False, 'from graphscope.framework import utils\n'), ((15826, 15861), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['e_prop_type'], {}), '(e_prop_type)\n', (15848, 15861), False, 'from graphscope.framework import utils\n')]
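The extract above records the shape of the analytical-engine op builders: attribute values are wrapped with the graphscope.framework.utils helpers (s_to_attr, i_to_attr, b_to_attr, list_str_to_attr, ...) and passed as the config of an Operation together with an op type and an output type from types_pb2. A minimal sketch of that recorded call shape follows; the concrete protobuf keys inside config are not visible in the extract, so the config argument is left to the caller, and the to_numpy_op name is purely illustrative.

from graphscope.framework.operation import Operation
from graphscope.proto import types_pb2


def to_numpy_op(results, config):
    # `config` is assumed to be a prepared attribute map, e.g. the context key via
    # utils.s_to_attr(results.key), a selector via utils.s_to_attr(selector) and an
    # axis via utils.i_to_attr(axis), matching the values recorded in the extract above.
    return Operation(
        results._session_id,
        types_pb2.CONTEXT_TO_NUMPY,
        config=config,
        output_types=types_pb2.TENSOR,
    )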
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file convert_matrix.py is referred and derived from project NetworkX, # # https://github.com/networkx/networkx/blob/master/networkx/convert_matrix.py # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # from collections import defaultdict import networkx.convert_matrix from graphscope import nx from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import patch_docstring import_as_graphscope_nx(networkx.convert_matrix) def from_pandas_edgelist( df, source="source", target="target", edge_attr=None, create_using=None, edge_key=None, ): g = nx.empty_graph(0, create_using) if edge_attr is None: g.add_edges_from(zip(df[source], df[target])) return g reserved_columns = [source, target] # Additional columns requested attr_col_headings = [] attribute_data = [] if edge_attr is True: attr_col_headings = [c for c in df.columns if c not in reserved_columns] elif isinstance(edge_attr, (list, tuple)): attr_col_headings = edge_attr else: attr_col_headings = [edge_attr] if len(attr_col_headings) == 0: raise nx.NetworkXError( f"Invalid edge_attr argument: No columns found with name: {attr_col_headings}" ) try: attribute_data = zip(*[df[col] for col in attr_col_headings]) except (KeyError, TypeError) as e: msg = f"Invalid edge_attr argument: {edge_attr}" raise nx.NetworkXError(msg) from e if g.is_multigraph(): # => append the edge keys from the df to the bundled data if edge_key is not None: try: multigraph_edge_keys = df[edge_key] attribute_data = zip(attribute_data, multigraph_edge_keys) except (KeyError, TypeError) as e: msg = f"Invalid edge_key argument: {edge_key}" raise nx.NetworkXError(msg) from e for s, t, attrs in zip(df[source], df[target], attribute_data): if edge_key is not None: attrs, multigraph_edge_key = attrs key = g.add_edge(s, t, key=multigraph_edge_key) else: key = g.add_edge(s, t) g[s][t][key].update(zip(attr_col_headings, attrs)) else: edges = [] for s, t, attrs in zip(df[source], df[target], attribute_data): edges.append((s, t, zip(attr_col_headings, attrs))) g.add_edges_from(edges) return g def to_numpy_array( G, nodelist=None, dtype=None, order=None, multigraph_weight=sum, weight="weight", nonedge=0.0, ): import numpy as np if nodelist is None: nodelist = list(G) nodeset = G nlen = len(G) else: nlen = len(nodelist) nodeset = set(G.nbunch_iter(nodelist)) if nlen != len(nodeset): for n in nodelist: if n not in G: raise nx.NetworkXError(f"Node {n} in nodelist is not in G") raise nx.NetworkXError("nodelist contains duplicates.") A = np.full((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order) # Corner cases: empty nodelist or graph without any edges if nlen == 0 or G.number_of_edges() == 0: return A # If dtype is structured and weight is None, use dtype field names as # edge attributes edge_attrs = None # Only single edge attribute by default if A.dtype.names: if weight is None: edge_attrs = dtype.names else: raise ValueError( "Specifying `weight` not supported for structured dtypes\n." "To create adjacency matrices from structured dtypes, use `weight=None`." 
) idx = dict(zip(sorted(nodelist), range(nlen))) if len(nodelist) < len(G): G = G.subgraph(nodelist) # A real subgraph, not view # Collect all edge weights and reduce with `multigraph_weights` if G.is_multigraph(): if edge_attrs: raise nx.NetworkXError( "Structured arrays are not supported for MultiGraphs" ) d = defaultdict(list) for u, v, wt in G.edges(data=weight, default=1.0): d[(idx[u], idx[v])].append(wt) i, j = np.array(list(d.keys())).T # indices wts = [multigraph_weight(ws) for ws in d.values()] # reduced weights else: i, j, wts = [], [], [] # Special branch: multi-attr adjacency from structured dtypes if edge_attrs: # Extract edges with all data for u, v, data in G.edges(data=True): i.append(idx[u]) j.append(idx[v]) wts.append(data) # Map each attribute to the appropriate named field in the # structured dtype for attr in edge_attrs: attr_data = [wt.get(attr, 1.0) for wt in wts] A[attr][i, j] = attr_data if not G.is_directed(): A[attr][j, i] = attr_data return A for u, v, wt in G.edges(data=weight, default=1.0): i.append(idx[u]) j.append(idx[v]) wts.append(wt) # Set array values with advanced indexing A[i, j] = wts if not G.is_directed(): A[j, i] = wts return A def to_numpy_matrix( G, nodelist=None, dtype=None, order=None, multigraph_weight=sum, weight="weight", nonedge=0.0, ): import numpy as np A = to_numpy_array( G, nodelist=nodelist, dtype=dtype, order=order, multigraph_weight=multigraph_weight, weight=weight, nonedge=nonedge, ) M = np.asmatrix(A, dtype=dtype) return M def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, weight="weight", format="csr"): import scipy as sp import scipy.sparse A = to_scipy_sparse_array( G, nodelist=nodelist, dtype=dtype, weight=weight, format=format ) return sp.sparse.csr_matrix(A).asformat(format) def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"): import scipy as sp import scipy.sparse # call as sp.sparse if len(G) == 0: raise nx.NetworkXError("Graph has no nodes or edges") if nodelist is None: nodelist = sorted(G) nlen = len(G) else: nlen = len(nodelist) if nlen == 0: raise nx.NetworkXError("nodelist has no nodes") nodeset = set(G.nbunch_iter(nodelist)) if nlen != len(nodeset): for n in nodelist: if n not in G: raise nx.NetworkXError(f"Node {n} in nodelist is not in G") raise nx.NetworkXError("nodelist contains duplicates.") if nlen < len(G): G = G.subgraph(nodelist) index = dict(zip(nodelist, range(nlen))) coefficients = zip( *((index[u], index[v], wt) for u, v, wt in G.edges(data=weight, default=1)) ) try: row, col, data = coefficients except ValueError: # there is no edge in the subgraph row, col, data = [], [], [] if G.is_directed(): A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, nlen), dtype=dtype) else: # symmetrize matrix d = data + data r = row + col c = col + row # selfloop entries get double counted when symmetrizing # so we subtract the data on the diagonal selfloops = list(nx.selfloop_edges(G, data=weight, default=1)) if selfloops: diag_index, diag_data = zip(*((index[u], -wt) for u, v, wt in selfloops)) d += diag_data r += diag_index c += diag_index A = sp.sparse.coo_array((d, (r, c)), shape=(nlen, nlen), dtype=dtype) try: return A.asformat(format) except ValueError as err: raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err
[ "scipy.sparse.csr_matrix", "scipy.sparse.coo_array", "graphscope.nx.empty_graph", "numpy.asmatrix", "graphscope.nx.selfloop_edges", "graphscope.nx.NetworkXError", "collections.defaultdict", "graphscope.nx.utils.compat.import_as_graphscope_nx", "numpy.full" ]
[((714, 762), 'graphscope.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.convert_matrix'], {}), '(networkx.convert_matrix)\n', (737, 762), False, 'from graphscope.nx.utils.compat import import_as_graphscope_nx\n'), ((914, 945), 'graphscope.nx.empty_graph', 'nx.empty_graph', (['(0)', 'create_using'], {}), '(0, create_using)\n', (928, 945), False, 'from graphscope import nx\n'), ((3394, 3461), 'numpy.full', 'np.full', (['(nlen, nlen)'], {'fill_value': 'nonedge', 'dtype': 'dtype', 'order': 'order'}), '((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order)\n', (3401, 3461), True, 'import numpy as np\n'), ((6038, 6065), 'numpy.asmatrix', 'np.asmatrix', (['A'], {'dtype': 'dtype'}), '(A, dtype=dtype)\n', (6049, 6065), True, 'import numpy as np\n'), ((1464, 1570), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['f"""Invalid edge_attr argument: No columns found with name: {attr_col_headings}"""'], {}), "(\n f'Invalid edge_attr argument: No columns found with name: {attr_col_headings}'\n )\n", (1480, 1570), False, 'from graphscope import nx\n'), ((4454, 4471), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (4465, 4471), False, 'from collections import defaultdict\n'), ((6572, 6619), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Graph has no nodes or edges"""'], {}), "('Graph has no nodes or edges')\n", (6588, 6619), False, 'from graphscope import nx\n'), ((7517, 7589), 'scipy.sparse.coo_array', 'sp.sparse.coo_array', (['(data, (row, col))'], {'shape': '(nlen, nlen)', 'dtype': 'dtype'}), '((data, (row, col)), shape=(nlen, nlen), dtype=dtype)\n', (7536, 7589), True, 'import scipy as sp\n'), ((8084, 8149), 'scipy.sparse.coo_array', 'sp.sparse.coo_array', (['(d, (r, c))'], {'shape': '(nlen, nlen)', 'dtype': 'dtype'}), '((d, (r, c)), shape=(nlen, nlen), dtype=dtype)\n', (8103, 8149), True, 'import scipy as sp\n'), ((1773, 1794), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['msg'], {}), '(msg)\n', (1789, 1794), False, 'from graphscope import nx\n'), ((3335, 3384), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""nodelist contains duplicates."""'], {}), "('nodelist contains duplicates.')\n", (3351, 3384), False, 'from graphscope import nx\n'), ((4340, 4411), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Structured arrays are not supported for MultiGraphs"""'], {}), "('Structured arrays are not supported for MultiGraphs')\n", (4356, 4411), False, 'from graphscope import nx\n'), ((6338, 6361), 'scipy.sparse.csr_matrix', 'sp.sparse.csr_matrix', (['A'], {}), '(A)\n', (6358, 6361), True, 'import scipy as sp\n'), ((6776, 6817), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""nodelist has no nodes"""'], {}), "('nodelist has no nodes')\n", (6792, 6817), False, 'from graphscope import nx\n'), ((7058, 7107), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""nodelist contains duplicates."""'], {}), "('nodelist contains duplicates.')\n", (7074, 7107), False, 'from graphscope import nx\n'), ((7835, 7879), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': 'weight', 'default': '(1)'}), '(G, data=weight, default=1)\n', (7852, 7879), False, 'from graphscope import nx\n'), ((8237, 8296), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['f"""Unknown sparse matrix format: {format}"""'], {}), "(f'Unknown sparse matrix format: {format}')\n", (8253, 8296), False, 'from graphscope import nx\n'), ((2204, 2225), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['msg'], {}), '(msg)\n', 
(2220, 2225), False, 'from graphscope import nx\n'), ((3263, 3316), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['f"""Node {n} in nodelist is not in G"""'], {}), "(f'Node {n} in nodelist is not in G')\n", (3279, 3316), False, 'from graphscope import nx\n'), ((6986, 7039), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['f"""Node {n} in nodelist is not in G"""'], {}), "(f'Node {n} in nodelist is not in G')\n", (7002, 7039), False, 'from graphscope import nx\n')]
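A short usage sketch of the converters defined in the record above. It assumes a GraphScope session is available (graphscope.nx graphs are engine-backed) and that these helpers are re-exported at the top-level graphscope.nx namespace, as they are in NetworkX; the sample edge list is made up.

import pandas as pd

from graphscope import nx

df = pd.DataFrame(
    {"source": [0, 1, 2], "target": [1, 2, 0], "weight": [1.0, 2.0, 0.5]}
)
G = nx.from_pandas_edgelist(df, edge_attr="weight", create_using=nx.DiGraph)
A = nx.to_numpy_array(G, weight="weight", nonedge=0.0)   # dense adjacency matrix
S = nx.to_scipy_sparse_array(G, weight="weight", format="csr")  # sparse CSR adjacency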
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import pytest from networkx.generators.tests.test_directed import TestGeneratorsDirected from networkx.generators.tests.test_directed import TestRandomKOutGraph from networkx.generators.tests.test_directed import TestUniformRandomKOutGraph import graphscope.nx as nx from graphscope.nx.classes import Graph from graphscope.nx.classes import MultiDiGraph from graphscope.nx.generators.directed import gn_graph from graphscope.nx.generators.directed import gnc_graph from graphscope.nx.generators.directed import gnr_graph from graphscope.nx.generators.directed import random_k_out_graph from graphscope.nx.generators.directed import random_uniform_k_out_graph from graphscope.nx.generators.directed import scale_free_graph from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGeneratorsDirected) class TestGeneratorsDirected: pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestRandomKOutGraph) class TestRandomKOutGraph: pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestUniformRandomKOutGraph) class TestUniformRandomKOutGraph: pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((1459, 1504), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1482, 1504), False, 'import pytest\n'), ((1506, 1556), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGeneratorsDirected'], {}), '(TestGeneratorsDirected)\n', (1532, 1556), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1599, 1644), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1622, 1644), False, 'import pytest\n'), ((1646, 1693), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestRandomKOutGraph'], {}), '(TestRandomKOutGraph)\n', (1672, 1693), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1733, 1778), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1756, 1778), False, 'import pytest\n'), ((1780, 1834), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestUniformRandomKOutGraph'], {}), '(TestUniformRandomKOutGraph)\n', (1806, 1834), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
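The record above shows the reuse pattern in full: an upstream NetworkX test class is bound to graphscope.nx by stacking the graphscope_session fixture with with_graphscope_nx_context. A hypothetical further example of the same pattern follows; the upstream module path and class name are assumptions, chosen only to illustrate that any compatible upstream test class can be wrapped the same way.

import pytest
from networkx.generators.tests.test_random_graphs import TestGeneratorsRandom  # assumed upstream test class

from graphscope.nx.utils.compat import with_graphscope_nx_context


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestGeneratorsRandom)
class TestGeneratorsRandom:
    pass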
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ An almost-complete Python to Cython AST transformer, with injected GRAPHSCOPE-specific translation. Python AST nodes are translated to corresponding Cython AST nodes as it is, except: 1. for top-level method, a Cython type annotation is attached to the function signature, for example, .. code:: python @graphscope.analytical.udf.peval('sssp') def PEval(frag, context): ... will be translated as: .. code:: cython cdef public void IncEval(Fragment *frag, ComputeContext *context): ... it will make Cython understand what we really want and generate proper Cpp code. 2. for invokation on methods inside :code:`graphscope.analytical.udf.core`, we generate proper special :code:`cdef` defintions, or proper Cpp invokations, just like :code:`cython.declare`, for example, .. code:: python heap = graphscope.analytical.udf.heap((float, 'node')) modified = lang.vector(bool, [False for _ in range(inner_vertices.size())]) will be translated as: .. code:: cython cdef priority_queue[pair[double, NodeT]] heap cdef vector[bool] modified([False for _ in range(inner_vertices.size())]) Note that :code:`float` in Python is mapped to :code:`double` in Cython (further in Cpp code). More specifically, we define a series of placeholders in module :code:`graphscope.analytical.udf.core`, which cannot be executed in pure python mode. The :code:`graphscope.analytical.udf.xxx` decorators will translate those ordinary *"assignment and call"* into a :code:`cdef` node in Cython AST. """ import ast import copy import functools import inspect import textwrap import types import warnings from Cython.CodeWriter import CodeWriter from Cython.Compiler import Builtin from Cython.Compiler import StringEncoding from Cython.Compiler.ExprNodes import * from Cython.Compiler.ModuleNode import * from Cython.Compiler.Nodes import * from graphscope.analytical.udf.patch import patch_cython_codewriter from graphscope.analytical.udf.utils import CType from graphscope.analytical.udf.utils import ExpectFuncDef from graphscope.analytical.udf.utils import LinesWrapper from graphscope.analytical.udf.utils import PregelAggregatorType from graphscope.analytical.udf.utils import ProgramModel from graphscope.framework.errors import check_argument class GRAPECompiler(ast.NodeVisitor): def __init__(self, name, vd_type, md_type, program_model=ProgramModel.Pregel): """ Args: name: str. The name of class. vd_type: str. The type of the data stored in vertex. md_type: str. The type of the message. program_model: ProgramModel. 
'Pregel' or 'PIE' """ self._name = name self._vd_type = vd_type self._md_type = md_type self._program_model = program_model # store aggregate function indexed by name self.__registed_aggregators = {} self.__globals = {} self.__func_params_name_list = [] self.__pyx_header = LinesWrapper() def set_pregel_program_model(self): self._program_model = ProgramModel.Pregel def set_pie_program_model(self): self._program_model = ProgramModel.PIE def parse(self, source): """Parse source into cython module node object. source: str The source code may represent a statement or expression. Raises: RuntimeError: unsupported ast trans from python to cython. """ tree = ast.parse(textwrap.dedent(source)) # associate `parent` reference to every node for node in ast.walk(tree): for child in ast.iter_child_nodes(node): setattr(child, "__parent__", node) cyast = self.visit(tree) return cyast def run(self, func_or_ast, pyx_header): self.__pyx_header = pyx_header # we already has a AST: just run it if isinstance(func_or_ast, ast.AST): cyast = self.visit(func_or_ast) else: check_argument(isinstance(func_or_ast, types.FunctionType)) # ignore varargs and keywords self.__func_params_name_list = inspect.getfullargspec(func_or_ast).args self.__globals = func_or_ast.__globals__ cyast = self.parse(inspect.getsource(func_or_ast)) writer = patch_cython_codewriter(CodeWriter()) cycode = "\n".join(writer.write(cyast).lines) return cycode def compile(self, source): """Compile source into cython code.""" cyast = self.parse(source) writer = patch_cython_codewriter(CodeWriter()) return "\n".join(writer.write(cyast).lines) def make_plain_arg(self, name, arg_loc): return CArgDeclNode( arg_loc, base_type=CSimpleBaseTypeNode( arg_loc, name=None, is_basic_c_type=0, signed=1, longness=0, is_self_arg=False, ), declarator=CNameDeclaratorNode(arg_loc, name=name), not_none=0, or_none=0, default=None, annotation=None, ) def make_value_arg(self, value_type, name, arg_loc): return CArgDeclNode( arg_loc, base_type=CSimpleBaseTypeNode( arg_loc, name=value_type, is_basic_c_type=0, signed=1, longness=0, is_self_arg=False, ), declarator=CNameDeclaratorNode(arg_loc, name=name), not_none=0, or_none=0, default=None, annotation=None, ) def make_ptr_arg(self, ptr_type, name, arg_loc): return CArgDeclNode( arg_loc, base_type=CSimpleBaseTypeNode( arg_loc, name=ptr_type, is_basic_c_type=0, signed=1, longness=0, is_self_arg=False, ), declarator=CPtrDeclaratorNode( arg_loc, base=CNameDeclaratorNode(arg_loc, name=name) ), not_none=0, or_none=0, default=None, annotation=None, ) def make_ref_arg(self, ref_type, name, arg_loc): return CArgDeclNode( arg_loc, base_type=CSimpleBaseTypeNode( arg_loc, name=ref_type, is_basic_c_type=0, signed=1, longness=0, complex=0, is_self_arg=False, templates=None, ), declarator=CReferenceDeclaratorNode( arg_loc, base=CNameDeclaratorNode(arg_loc, name=name) ), not_none=0, or_none=0, default=None, annotation=None, ) def make_template_arg( self, value_type, value_tpls, name, arg_loc, use_ptr=False, use_ref=False ): def mk_tpl_arg(n): return CComplexBaseTypeNode( arg_loc, base_type=CSimpleBaseTypeNode( arg_loc, name=n, is_basic_c_type=0, signed=1, longness=0, is_self_arg=False, ), declarator=CNameDeclaratorNode( arg_loc, name="", cname=None, default=None ), ) tpl_type = TemplatedTypeNode( arg_loc, positional_args=[mk_tpl_arg(n) for n in value_tpls], keyword_args=DictNode(arg_loc, key_value_pairs=[]), base_type_node=CSimpleBaseTypeNode( arg_loc, name=value_type, is_basic_c_type=0, signed=1, longness=0, is_self_arg=False, ), ) if 
use_ptr: declarator = CPtrDeclaratorNode( arg_loc, base=CNameDeclaratorNode(arg_loc, name=name) ) elif use_ref: declarator = CReferenceDeclaratorNode( arg_loc, base=CNameDeclaratorNode(arg_loc, name=name) ) else: declarator = CNameDeclaratorNode(arg_loc, name=name) return CArgDeclNode( arg_loc, base_type=tpl_type, declarator=declarator, not_none=0, or_none=0, default=None, annotation=None, ) def loc(self, node): return ["", 0, 0] def generic_visit(self, node): raise NotImplementedError("AST node %s is not supported yet" % node) def visit_Module(self, node): body = self.visit(node.body[0]) return ModuleNode(self.loc(node), body=body) def visit_ImportFrom(self, node): raise RuntimeError("ImportFrom is not supported yet.") def visit_Import(self, node): raise RuntimeError("Import is not supported yet.") def visit_ClassDef(self, node): raise RuntimeError("Class definition is not supported yet.") def visit_JoinedStr(self, node): raise RuntimeError("Joinedstr is not supported yet.") def visit_Constant(self, node): if isinstance(node.value, int): return IntNode(self.loc(node), value=str(node.value)) if isinstance(node.value, float): # We won't have c float, we map floating types to double return FloatNode(self.loc(node), value=str(node.value)) if isinstance(node.value, str): if node.kind == "u": return UnicodeNode(self.loc(node), value=node.value, bytes_value=None) return StringNode( self.loc(node), value=node.value, unicode_value=StringEncoding.EncodedString(node.value), ) if ( isinstance(Ellipsis, type) and isinstance(node.value, Ellipsis) or isinstance(node.value, type(Ellipsis)) ): return EllipsisNode(self.loc(node)) if isinstance(node.value, bytes): return BytesNode(self.loc(node), value=node.s) if node.value is None: return NoneNode(self.loc(node)) raise NotImplementedError("Unknown constant value: %s" % node) def visit_Num(self, node): if isinstance(node.n, int): return IntNode(self.loc(node), value=str(node.n)) if isinstance(node.n, float): return FloatNode(self.loc(node), value=str(node.n)) if isinstance(node.n, complex): raise NotImplementedError("Not support complex constant yet") raise NotImplementedError("Unknown constant value: %s" % node) def visit_Str(self, node): return StringNode( self.loc(node), value=node.s, unicode_value=StringEncoding.EncodedString(node.s), ) def visit_Bytes(self, node): return BytesNode(self.loc(node), value=node.s) def visit_List(self, node): return ListNode(self.loc(node), args=[self.visit(elt) for elt in node.elts]) def visit_Tuple(self, node): return TupleNode(self.loc(node), args=[self.visit(elt) for elt in node.elts]) def visit_Set(self, node): return SetNode(self.loc(node), args=[self.visit(elt) for elt in node.elts]) def visit_Dict(self, node): kvs = [ DictItemNode(self.loc(node), key=self.visit(k), value=self.visit(v)) for k, v in zip(node.keys, node.values) ] return DictNode(self.loc(node), key_value_pairs=kvs) def visit_Ellipsis(self, node): return EllipsisNode(self.loc(node)) def visit_NameConstant(self, node): if node.value in [True, False]: return BoolNode(self.loc(node), value=node.value) return NoneNode(self.loc(node)) def visit_Name(self, node): return NameNode(self.loc(node), name=node.id) def visit_Expr(self, node): expr = self.visit(node.value) if isinstance(expr, CVarDefNode): return expr return ExprStatNode(self.loc(node), expr=expr) def visit_UnaryOp(self, node): if isinstance(node.op, ast.UAdd): return UnaryPlusNode( self.loc(node), operator="+", operand=self.visit(node.operand) ) if isinstance(node.op, ast.USub): return 
UnaryMinusNode( self.loc(node), operator="-", operand=self.visit(node.operand) ) if isinstance(node.op, ast.Not): return NotNode(self.loc(node), operand=self.visit(node.operand)) if isinstance(node.op, ast.Invert): return TildeNode( self.loc(node), operator="~", operand=self.visit(node.operand) ) def visit_UAdd(self, node): return "+" def visit_USub(self, node): return "-" def visit_Not(self, node): return "not" def visit_Invert(self, node): return "invert" def visit_BinOp(self, node): lhs = self.visit(node.left) rhs = self.visit(node.right) op_mapping = { ast.Add: (AddNode, "+"), ast.Sub: (SubNode, "-"), ast.Mult: (MulNode, "*"), ast.Div: (DivNode, "/"), ast.FloorDiv: (DivNode, "//"), ast.Mod: (ModNode, "%"), ast.Pow: (PowNode, "**"), ast.MatMult: (MatMultNode, "@"), ast.LShift: (IntBinopNode, "<<"), ast.RShift: (IntBinopNode, ">>"), ast.BitOr: (IntBinopNode, "|"), ast.BitXor: (IntBinopNode, "^"), ast.BitAnd: (IntBinopNode, "&"), } op_type, op = op_mapping[type(node.op)] return op_type(self.loc(op), operator=op, operand1=lhs, operand2=rhs) def visit_Add(self, node): return "+" def visit_Sub(self, node): return "-" def visit_Mult(self, node): return "*" def visit_Div(self, node): return "/" def visit_FloorDiv(self, node): return "//" def visit_Mod(self, node): return "%" def visit_Pow(self, node): return "**" def visit_LShift(self, node): return "<<" def visit_RShift(self, node): return ">>" def visit_BitOr(self, node): return "|" def visit_BitXor(self, node): return "^" def visit_BitAnd(self, node): return "&" def visit_MatMult(self, node): return "@" def visit_AnnAssign(self, node): annotation = NameNode(self.loc(node), name=node.annotation.id) lhs = NameNode(self.loc(node), name=node.target.id, annotation=annotation) rhs = self.visit(node.value) return SingleAssignmentNode(self.loc(node), lhs=lhs, rhs=rhs) def visit_BoolOp(self, node): return BoolBinopNode( self.loc(node), operator=self.visit(node.op), operand1=self.visit(node.values[0]), operand2=self.visit(node.values[1]), ) def visit_And(self, node): return "and" def visit_Or(self, node): return "or" def visit_Compare(self, node): operator = self.visit(node.ops[0]) operand1 = self.visit(node.left) operand2 = self.visit(node.comparators[0]) if len(node.comparators) == 1: # single comparison return PrimaryCmpNode( self.loc(node), operator=operator, operand1=operand1, operand2=operand2 ) # multiple continuous comparison cascade_node = CascadedCmpNode( self.loc(node), operator=self.visit(node.ops[-1]), operand2=self.visit(node.comparators[-1]), ) for op, comparator in zip(node.ops[-2:0:-1], node.comparators[-2:0:-1]): cascade_node = CascadedCmpNode( self.loc(node), operator=self.visit(op), operand2=self.visit(comparator), cascade=cascade_node, ) return PrimaryCmpNode( self.loc(node), operator=operator, operand1=operand1, operand2=operand2, cascade=cascade_node, ) def visit_Eq(self, node): return "==" def visit_NotEq(self, node): return "!=" def visit_Lt(self, node): return "<" def visit_LtE(self, node): return "<=" def visit_Gt(self, node): return ">" def visit_GtE(self, node): return ">=" def visit_Is(self, node): return "is" def visit_IsNot(self, node): return "is not" def visit_In(self, node): return "in" def visit_NotIn(self, node): return "not in" def __flatten_func_name(self, name): if isinstance(name, ast.Name): return [name.id] if isinstance(name, ast.Attribute): return self.__flatten_func_name(name.value) + [name.attr] return [] def __is_graphscope_api_call(self, node): flat_func_name = self.__flatten_func_name(node.func) if 
len(flat_func_name) == 0: return False if flat_func_name[0] in self.__func_params_name_list: return True # check from graphscope module cascade = self.__globals.get(flat_func_name[0]) if cascade is None: return False for n in flat_func_name[1:]: if cascade is None or not hasattr(cascade, n): return False cascade = getattr(cascade, n) return cascade.__module__ == "graphscope.analytical.udf.types" def __visit_GraphScopeAPICall(self, node): full_func_name = self.__flatten_func_name(node.func) obj = full_func_name[0] name = node.func.attr if obj == "graphscope": # graphscope.declare() if name == "declare": var = node.args[1].id var_type = node.args[0].attr return CVarDefNode( self.loc(node), base_type=CSimpleBaseTypeNode( self.loc(node), name=var_type, module_path=[], is_basic_c_type=0, signed=1, ), declarators=[CNameDeclaratorNode(self.loc(node), name=var)], visibility="private", ) elif obj == "context" and name == "register_aggregator": # context.register_aggregator() args = node.args if len(args) != 2: raise ValueError("Params within register_aggregator incorrect.") if ( isinstance(args[1], ast.Attribute) and args[1].value.id == "PregelAggregatorType" ): self.__registed_aggregators[str(args[0].s)] = args[1].attr return SimpleCallNode( self.loc(node), function=self.visit(node.func), args=[self.visit(arg) for arg in node.args], ) elif obj == "context" and name == "aggregate": # context.aggregate() args = node.args if len(args) != 2: raise ValueError("Params within aggregate incorrect.") if str(args[0].s) not in self.__registed_aggregators.keys(): raise KeyError( "Aggregator %s not exist, you may want to register first." % str(args[0].s) ) ctype = PregelAggregatorType.extract_ctype( self.__registed_aggregators[str(args[0].s)] ) return SimpleCallNode( self.loc(node), function=IndexNode( self.loc(node), base=AttributeNode( self.loc(node), obj=NameNode(self.loc(node), name=obj), attribute=name, ), index=NameNode(self.loc(node), name=str(ctype)), ), args=[self.visit(arg) for arg in node.args], ) elif obj == "context" and name == "get_aggregated_value": # context.get_aggregated_value() args = node.args if len(args) != 1: raise ValueError("Params within get_aggregated_value incorrect.") if str(args[0].s) not in self.__registed_aggregators.keys(): raise KeyError( "Aggregator %s not exist, you may want to register first." 
% str(args[0].s) ) ctype = PregelAggregatorType.extract_ctype( self.__registed_aggregators[str(args[0].s)] ) return SimpleCallNode( self.loc(node), function=IndexNode( self.loc(node), base=AttributeNode( self.loc(node), obj=NameNode(self.loc(node), name=obj), attribute=name, ), index=NameNode(self.loc(node), name=str(ctype)), ), args=[self.visit(arg) for arg in node.args], ) elif obj == "context" and full_func_name[1] == "math": mnode = copy.copy(node) mnode.func = ast.Attribute(value=ast.Name(id="math"), attr=name) return self.visit(mnode) else: return SimpleCallNode( self.loc(node), function=self.visit(node.func), args=[self.visit(arg) for arg in node.args], ) def visit_Call(self, node): if self.__is_graphscope_api_call(node): return self.__visit_GraphScopeAPICall(node) if not node.keywords: return SimpleCallNode( self.loc(node), function=self.visit(node.func), args=[self.visit(arg) for arg in node.args], ) # with kwargs param return GeneralCallNode( self.loc(node), function=self.visit(node.func), positional_args=TupleNode( self.loc(node), args=[self.visit(arg) for arg in node.args] ), # keyword_args=DictNode(self.loc(node), key_value_pairs=[])) keyword_args=self._visit_keywords(node.keywords), ) def _visit_keywords(self, node): kvs = [] for keyword in node: kvs.append(self.visit_keyword(keyword)) return DictNode(self.loc(node), key_value_pairs=kvs, reject_duplicates=True) def visit_keyword(self, node): key = IdentifierStringNode(self.loc(node), value=node.arg) return DictItemNode(self.loc(node), key=key, value=self.visit(node.value)) def visit_IfExp(self, node): return CondExprNode( self.loc(node), test=self.visit(node.test), true_val=self.visit(node.body), false_val=self.visit(node.orelse), ) def visit_Attribute(self, node): full_attr_name = self.__flatten_func_name(node) if full_attr_name[0] == "context" and full_attr_name[1] == "math": mnode = copy.copy(node) mnode.value = ast.Name(id="math") return self.visit(mnode) return AttributeNode( self.loc(node), obj=self.visit(node.value), attribute=node.attr ) def visit_Subscript(self, node): return IndexNode( self.loc(node), base=self.visit(node.value), index=self.visit(node.slice) ) def visit_Index(self, node): return self.visit(node.value) def visit_Slice(self, node): start = ( NoneNode(self.loc(node)) if node.lower is None else self.visit(node.lower) ) stop = ( NoneNode(self.loc(node)) if node.upper is None else self.visit(node.upper) ) step = NoneNode(self.loc(node)) if node.step is None else self.visit(node.step) return SliceNode(self.loc(node), start=start, stop=stop, step=step) def visit_ExtSlice(self, node): return TupleNode(self.loc(node), args=[self.visit(dim) for dim in node.dims]) def visit_ListComp(self, node): check_argument(len(node.generators) == 1) # has if node or not has_if = True if node.generators[0].ifs else False expression_value = self.visit(node.elt) generator = node.generators[0] iter_value = IteratorNode( self.loc(generator.iter), sequence=self.visit(generator.iter) ) comp_node = ComprehensionAppendNode(self.loc(generator), expr=expression_value) if has_if: check_argument(len(node.generators[0].ifs) == 1) # construct IfStatNode condition = self.visit(node.generators[0].ifs[0]) body = comp_node if_stat_node = IfStatNode( self.loc(node), if_clauses=[ IfClauseNode(self.loc(node), condition=condition, body=body) ], else_clause=None, ) loop = ForInStatNode( self.loc(node), target=self.visit(generator.target), iterator=iter_value, body=if_stat_node, else_clause=None, is_async=False, ) else: loop = ForInStatNode( 
self.loc(node), target=self.visit(generator.target), iterator=iter_value, body=comp_node, else_clause=None, is_async=False, ) return ComprehensionNode( self.loc(node), loop=loop, append=comp_node, type=Builtin.list_type, has_local_scope=True, ) def visit_SetComp(self, node): assert len(node.generators) == 1 # has if node or not has_if = True if node.generators[0].ifs else False expression_value = self.visit(node.elt) generator = node.generators[0] iter_value = IteratorNode( self.loc(generator.iter), sequence=self.visit(generator.iter) ) comp_node = ComprehensionAppendNode(self.loc(generator), expr=expression_value) if has_if: assert len(node.generators[0].ifs) == 1 # construct IfStatNode condition = self.visit(node.generators[0].ifs[0]) body = comp_node if_stat_node = IfStatNode( self.loc(node), if_clauses=[ IfClauseNode(self.loc(node), condition=condition, body=body) ], else_clause=None, ) loop = ForInStatNode( self.loc(node), target=self.visit(generator.target), iterator=iter_value, body=if_stat_node, else_clause=None, is_async=False, ) else: loop = ForInStatNode( self.loc(node), target=self.visit(generator.target), iterator=iter_value, body=comp_node, else_clause=None, is_async=False, ) return ComprehensionNode( self.loc(node), loop=loop, append=comp_node, type=Builtin.set_type ) def visit_DictComp(self, node): assert len(node.generators) == 1 # has if node or not has_if = True if node.generators[0].ifs else False generator = node.generators[0] iter_value = IteratorNode( self.loc(generator.iter), sequence=self.visit(generator.iter) ) comp_node = DictComprehensionAppendNode( self.loc(generator), key_expr=self.visit(node.key), value_expr=self.visit(node.value), ) if has_if: assert len(node.generators[0].ifs) == 1 # construct IfStatNode condition = self.visit(node.generators[0].ifs[0]) body = comp_node if_stat_node = IfStatNode( self.loc(node), if_clauses=[ IfClauseNode(self.loc(node), condition=condition, body=body) ], else_clause=None, ) loop = ForInStatNode( self.loc(node), target=self.visit(generator.target), iterator=iter_value, body=if_stat_node, else_clause=None, is_async=False, ) else: loop = ForInStatNode( self.loc(node), target=self.visit(generator.target), iterator=iter_value, body=comp_node, else_clause=None, is_async=False, ) return ComprehensionNode( self.loc(node), loop=loop, append=comp_node, type=Builtin.dict_type ) def visit_Assign(self, node): # `tuple` represents a multiple assign assert len(node.targets) == 1 if ( hasattr(node.targets[0], "id") and node.targets[0].id in self.__func_params_name_list ): raise RuntimeError("Can't assign to internal variables.") lhs = self.visit(node.targets[0]) rhs = self.visit(node.value) return SingleAssignmentNode(self.loc(node), lhs=lhs, rhs=rhs) def visit_AugAssign(self, node): return InPlaceAssignmentNode( self.loc(node), operator=self.visit(node.op), lhs=self.visit(node.target), rhs=self.visit(node.value), ) def visit_Raise(self, node): return RaiseStatNode( self.loc(node), exc_type=self.visit(node.exc), exc_value=None, exc_tb=None, cause=None if node.cause is None else self.visit(node.cause), ) def visit_ExceptHandler(self, node): if node.type: pattern = [self.visit(node.type)] if node.name: target = NameNode(self.loc(node), name=node.name) else: target = None else: pattern = None target = None body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.body] ) return ExceptClauseNode( self.loc(node), pattern=pattern, target=target, body=body, is_except_as=False, ) def visit_Try(self, node): body = StatListNode( 
self.loc(node), stats=[self.visit(stat) for stat in node.body] ) except_clauses = [self.visit(ec) for ec in node.handlers] if node.orelse: else_clause = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.orelse] ) else: else_clause = None try_except_stat_node = TryExceptStatNode( self.loc(node), body=body, except_clauses=except_clauses, else_clause=else_clause, ) # with `finally` statement or not if node.finalbody: final_clause = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.finalbody] ) return TryFinallyStatNode( self.loc(node), body=try_except_stat_node, finally_clause=final_clause ) return try_except_stat_node def visit_Assert(self, node): return AssertStatNode( self.loc(node), cond=self.visit(node.test), value=self.visit(node.msg) if node.msg else None, ) def visit_Delete(self, node): return DelStatNode( self.loc(node), args=[self.visit(target) for target in node.targets] ) def visit_Pass(self, node): return PassStatNode(self.loc(node)) def visit_If(self, node): condition = self.visit(node.test) body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.body] ) if node.orelse: else_body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.orelse] ) else: else_body = None return IfStatNode( self.loc(node), if_clauses=[IfClauseNode(self.loc(node), condition=condition, body=body)], else_clause=else_body, ) def visit_For(self, node): target_value = self.visit(node.target) iter_value = IteratorNode(self.loc(node.iter), sequence=self.visit(node.iter)) body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.body] ) if node.orelse: else_body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.orelse] ) else: else_body = None return ForInStatNode( self.loc(node), target=target_value, iterator=iter_value, body=body, else_clause=else_body, is_async=False, ) def visit_While(self, node): condition = self.visit(node.test) body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.body] ) if node.orelse: else_body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.orelse] ) else: else_body = None return WhileStatNode( self.loc(node), condition=condition, body=body, else_clause=else_body ) def visit_withitem(self, node): return self.visit(node.context_expr) def visit_With(self, node): # multiple items is not supported yet assert len(node.items) == 1 manager = self.visit(node.items[0]) target = self.visit(node.items[0].optional_vars) body = StatListNode( self.loc(node), stats=[self.visit(stat) for stat in node.body] ) return WithStatNode(self.loc(node), manager=manager, target=target, body=body) def visit_Break(self, node): return BreakStatNode(self.loc(node)) def visit_Continue(self, node): return ContinueStatNode(self.loc(node)) def visit_FunctionDef(self, node): def is_static_method(func): return ( func.decorator_list and isinstance(func.decorator_list[0], ast.Name) and (func.decorator_list[0].id == "staticmethod") ) if not is_static_method(node): raise RuntimeError("Missing decorator staticmethod.") function_name = node.name function_return_type = "void" if self._program_model == ProgramModel.PIE: # PIE program model if function_name == ExpectFuncDef.INIT.value: args = node.args.args assert len(args) == 2, "The number of parameters does not match" args = [ self.make_ref_arg("Fragment", args[0].arg, self.loc(args[0])), self.make_template_arg( "Context", [self._vd_type, self._md_type], args[1].arg, self.loc(args[1]), use_ref=True, ), ] 
elif function_name == ExpectFuncDef.PEVAL.value: args = node.args.args assert len(args) == 2, "The number of parameters does not match" args = [ self.make_ref_arg("Fragment", args[0].arg, self.loc(args[0])), self.make_template_arg( "Context", [self._vd_type, self._md_type], args[1].arg, self.loc(args[1]), use_ref=True, ), ] elif function_name == ExpectFuncDef.INCEVAL.value: args = node.args.args assert len(args) == 2, "The number of parameters does not match" args = [ self.make_ref_arg("Fragment", args[0].arg, self.loc(args[0])), self.make_template_arg( "Context", [self._vd_type, self._md_type], args[1].arg, self.loc(args[1]), use_ref=True, ), ] else: raise RuntimeError( "Not recognized method named {}".format(function_name) ) elif self._program_model == ProgramModel.Pregel: if function_name == ExpectFuncDef.INIT.value: args = node.args.args assert len(args) == 2, "The number of parameters does not match" args = [ self.make_template_arg( "Vertex", [self._vd_type, self._md_type], args[0].arg, self.loc(args[0]), use_ref=True, ), self.make_template_arg( "Context", [self._vd_type, self._md_type], args[1].arg, self.loc(args[1]), use_ref=True, ), ] elif function_name == ExpectFuncDef.COMPUTE.value: args = node.args.args assert len(args) == 3, "The number of parameters does not match" args = [ self.make_template_arg( "MessageIterator", [self._md_type], args[0].arg, self.loc(args[0]), ), self.make_template_arg( "Vertex", [self._vd_type, self._md_type], args[1].arg, self.loc(args[1]), use_ref=True, ), self.make_template_arg( "Context", [self._vd_type, self._md_type], args[2].arg, self.loc(args[2]), use_ref=True, ), ] elif function_name == ExpectFuncDef.COMBINE.value: args = node.args.args assert len(args) == 1, "The number of parameters does not match" args = [ self.make_template_arg( "MessageIterator", [self._md_type], args[0].arg, self.loc(args[0]), ) ] function_return_type = self._md_type else: raise RuntimeError( "Not recognized method named {}".format(function_name) ) base_type = CSimpleBaseTypeNode( self.loc(node), name=function_return_type, is_basic_c_type=1, signed=1, longness=0, is_self_arg=False, ) declarator_name = function_name declarator = CFuncDeclaratorNode( self.loc(node), base=CNameDeclaratorNode(self.loc(node), name=declarator_name), args=args, has_varargs=False, exception_value=None, exception_check=False, nogil=True, with_gil=False, overridable=False, ) # traverse body body = StatListNode( self.loc(node), stats=[self.visit(expr) for expr in node.body] ) return CFuncDefNode( self.loc(node), visibility="public", base_type=base_type, declarator=declarator, body=body, modifiers=[], api=False, overridable=False, is_const_method=False, ) def visit_Lambda(self, node): return LambdaNode( self.loc(node), args=[ self.make_plain_arg(arg.arg, self.loc(arg)) for arg in node.args.args ], star_arg=None, starstar_arg=None, retult_expr=self.visit(node.body), ) def visit_Return(self, node): if node.value is None: value = None else: value = self.visit(node.value) return ReturnStatNode(self.loc(node), value=value) def visit_Yield(self, node): return YieldExprNode(self.loc(node), expr=self.visit(node.value)) def visit_YieldFrom(self, node): return YieldFromExprNode(self.loc(node), expr=self.visit(node.value)) def visit_Global(self, node): return GlobalNode(self.loc(node), names=node.names) def visit_Nonlocal(self, node): return NonlocalNode(self.loc(node), names=node.names) def visit_Await(self, node): return AwaitExprNode(self.loc(node), expr=self.visit(node.value))
[ "ast.Name", "Cython.Compiler.StringEncoding.EncodedString", "Cython.CodeWriter.CodeWriter", "copy.copy", "inspect.getfullargspec", "textwrap.dedent", "ast.walk", "graphscope.analytical.udf.utils.LinesWrapper", "ast.iter_child_nodes", "inspect.getsource" ]
[((3765, 3779), 'graphscope.analytical.udf.utils.LinesWrapper', 'LinesWrapper', ([], {}), '()\n', (3777, 3779), False, 'from graphscope.analytical.udf.utils import LinesWrapper\n'), ((4353, 4367), 'ast.walk', 'ast.walk', (['tree'], {}), '(tree)\n', (4361, 4367), False, 'import ast\n'), ((4255, 4278), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (4270, 4278), False, 'import textwrap\n'), ((4394, 4420), 'ast.iter_child_nodes', 'ast.iter_child_nodes', (['node'], {}), '(node)\n', (4414, 4420), False, 'import ast\n'), ((5115, 5127), 'Cython.CodeWriter.CodeWriter', 'CodeWriter', ([], {}), '()\n', (5125, 5127), False, 'from Cython.CodeWriter import CodeWriter\n'), ((5360, 5372), 'Cython.CodeWriter.CodeWriter', 'CodeWriter', ([], {}), '()\n', (5370, 5372), False, 'from Cython.CodeWriter import CodeWriter\n'), ((24285, 24300), 'copy.copy', 'copy.copy', (['node'], {}), '(node)\n', (24294, 24300), False, 'import copy\n'), ((24327, 24346), 'ast.Name', 'ast.Name', ([], {'id': '"""math"""'}), "(id='math')\n", (24335, 24346), False, 'import ast\n'), ((4916, 4951), 'inspect.getfullargspec', 'inspect.getfullargspec', (['func_or_ast'], {}), '(func_or_ast)\n', (4938, 4951), False, 'import inspect\n'), ((5041, 5071), 'inspect.getsource', 'inspect.getsource', (['func_or_ast'], {}), '(func_or_ast)\n', (5058, 5071), False, 'import inspect\n'), ((11737, 11773), 'Cython.Compiler.StringEncoding.EncodedString', 'StringEncoding.EncodedString', (['node.s'], {}), '(node.s)\n', (11765, 11773), False, 'from Cython.Compiler import StringEncoding\n'), ((10664, 10704), 'Cython.Compiler.StringEncoding.EncodedString', 'StringEncoding.EncodedString', (['node.value'], {}), '(node.value)\n', (10692, 10704), False, 'from Cython.Compiler import StringEncoding\n'), ((22358, 22373), 'copy.copy', 'copy.copy', (['node'], {}), '(node)\n', (22367, 22373), False, 'import copy\n'), ((22419, 22438), 'ast.Name', 'ast.Name', ([], {'id': '"""math"""'}), "(id='math')\n", (22427, 22438), False, 'import ast\n')]
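The visit_FunctionDef handler above only admits @staticmethod members and dispatches on the method name and argument count (two arguments for the PIE model's init/peval/inceval hooks, with Vertex/MessageIterator variants for Pregel). A minimal sketch of the class shape such a visitor would accept follows; the class name, the method names (assumed to match ExpectFuncDef's INIT/PEVAL/INCEVAL values) and the empty bodies are placeholders, not code taken from the row above.

# Illustrative sketch only: every method is a @staticmethod taking (fragment, context),
# named after the hooks checked in visit_FunctionDef. Names and bodies are placeholders.
class MyAlgorithm(object):
    @staticmethod
    def Init(frag, context):      # assumed to match ExpectFuncDef.INIT.value
        pass

    @staticmethod
    def PEval(frag, context):     # assumed to match ExpectFuncDef.PEVAL.value
        pass

    @staticmethod
    def IncEval(frag, context):   # assumed to match ExpectFuncDef.INCEVAL.value
        pass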
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Manage connections of the GraphScope store service. """ import grpc from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection from gremlin_python.process.anonymous_traversal import traversal from graphscope.framework.graph_schema import GraphSchema from graphscope.framework.record import EdgeRecordKey from graphscope.framework.record import VertexRecordKey from graphscope.framework.record import to_write_requests_pb from graphscope.proto import ddl_service_pb2 from graphscope.proto import ddl_service_pb2_grpc from graphscope.proto import write_service_pb2 from graphscope.proto import write_service_pb2_grpc class Graph: def __init__(self, graph_def, conn=None) -> None: self._schema = GraphSchema() self._schema.from_graph_def(graph_def) self._conn: Connection = conn self._schema._conn = conn def schema(self): return self._schema def insert_vertex(self, vertex: VertexRecordKey, properties: dict): return self.insert_vertices([[vertex, properties]]) def insert_vertices(self, vertices: list): request = to_write_requests_pb("VERTEX", vertices, write_service_pb2.INSERT) return self._conn.batch_write(request) def update_vertex_properties(self, vertex: VertexRecordKey, properties: dict): request = to_write_requests_pb( "VERTEX", [[vertex, properties]], write_service_pb2.UPDATE ) return self._conn.batch_write(request) def delete_vertex(self, vertex_pk: VertexRecordKey): return self.delete_vertices([vertex_pk]) def delete_vertices(self, vertex_pks: list): request = to_write_requests_pb( "VERTEX", [[pk, {}] for pk in vertex_pks], write_service_pb2.DELETE ) return self._conn.batch_write(request) def insert_edge(self, edge: EdgeRecordKey, properties: dict): return self.insert_edges([[edge, properties]]) def insert_edges(self, edges: list): request = to_write_requests_pb("EDGE", edges, write_service_pb2.INSERT) return self._conn.batch_write(request) def update_edge_properties(self, edge: EdgeRecordKey, properties: dict): request = to_write_requests_pb( "EDGE", [[edge, properties]], write_service_pb2.UPDATE ) return self._conn.batch_write(request) def delete_edge(self, edge: EdgeRecordKey): return self.delete_edges([edge]) def delete_edges(self, edge_pks: list): request = to_write_requests_pb( "EDGE", [[pk, {}] for pk in edge_pks], write_service_pb2.DELETE ) return self._conn.batch_write(request) class Connection: def __init__(self, addr, gremlin_endpoint=None) -> None: self._addr = addr self._gremlin_endpoint = gremlin_endpoint self._conn = None channel = grpc.insecure_channel(addr) self._ddl_service_stub = ddl_service_pb2_grpc.ClientDdlStub(channel) self._write_service_stub = write_service_pb2_grpc.ClientWriteStub(channel) self._client_id = None def close(self): if self._conn is not None: try: self._conn.close() except Exception: pass # be silent when closing self._conn = None def submit(self, requests): return 
self._ddl_service_stub.batchSubmit(requests) def get_graph_def(self, requests): return self._ddl_service_stub.getGraphDef(requests) def g(self): request = ddl_service_pb2.GetGraphDefRequest() graph_def = self.get_graph_def(request).graph_def graph = Graph(graph_def, self) return graph def gremlin(self): graph_url = "ws://%s/gremlin" % self._gremlin_endpoint if self._conn is None: self._conn = DriverRemoteConnection(graph_url, "g") return traversal().withRemote(self._conn) def _get_client_id(self): if self._client_id is None: request = write_service_pb2.GetClientIdRequest() response = self._write_service_stub.getClientId(request) self._client_id = response.client_id return self._client_id def batch_write(self, request): request.client_id = self._get_client_id() response = self._write_service_stub.batchWrite(request) return response.snapshot_id def remote_flush(self, snapshot_id, timeout_ms=3000): request = write_service_pb2.RemoteFlushRequest() request.snapshot_id = snapshot_id request.wait_time_ms = timeout_ms response = self._write_service_stub.remoteFlush(request) return response.success def conn(addr, gremlin_endpoint=None): return Connection(addr, gremlin_endpoint)
[ "graphscope.proto.write_service_pb2.RemoteFlushRequest", "gremlin_python.driver.driver_remote_connection.DriverRemoteConnection", "grpc.insecure_channel", "graphscope.framework.record.to_write_requests_pb", "graphscope.proto.write_service_pb2_grpc.ClientWriteStub", "graphscope.proto.ddl_service_pb2_grpc.ClientDdlStub", "graphscope.framework.graph_schema.GraphSchema", "gremlin_python.process.anonymous_traversal.traversal", "graphscope.proto.ddl_service_pb2.GetGraphDefRequest", "graphscope.proto.write_service_pb2.GetClientIdRequest" ]
[((1403, 1416), 'graphscope.framework.graph_schema.GraphSchema', 'GraphSchema', ([], {}), '()\n', (1414, 1416), False, 'from graphscope.framework.graph_schema import GraphSchema\n'), ((1786, 1852), 'graphscope.framework.record.to_write_requests_pb', 'to_write_requests_pb', (['"""VERTEX"""', 'vertices', 'write_service_pb2.INSERT'], {}), "('VERTEX', vertices, write_service_pb2.INSERT)\n", (1806, 1852), False, 'from graphscope.framework.record import to_write_requests_pb\n'), ((2002, 2087), 'graphscope.framework.record.to_write_requests_pb', 'to_write_requests_pb', (['"""VERTEX"""', '[[vertex, properties]]', 'write_service_pb2.UPDATE'], {}), "('VERTEX', [[vertex, properties]], write_service_pb2.UPDATE\n )\n", (2022, 2087), False, 'from graphscope.framework.record import to_write_requests_pb\n'), ((2327, 2420), 'graphscope.framework.record.to_write_requests_pb', 'to_write_requests_pb', (['"""VERTEX"""', '[[pk, {}] for pk in vertex_pks]', 'write_service_pb2.DELETE'], {}), "('VERTEX', [[pk, {}] for pk in vertex_pks],\n write_service_pb2.DELETE)\n", (2347, 2420), False, 'from graphscope.framework.record import to_write_requests_pb\n'), ((2668, 2729), 'graphscope.framework.record.to_write_requests_pb', 'to_write_requests_pb', (['"""EDGE"""', 'edges', 'write_service_pb2.INSERT'], {}), "('EDGE', edges, write_service_pb2.INSERT)\n", (2688, 2729), False, 'from graphscope.framework.record import to_write_requests_pb\n'), ((2873, 2949), 'graphscope.framework.record.to_write_requests_pb', 'to_write_requests_pb', (['"""EDGE"""', '[[edge, properties]]', 'write_service_pb2.UPDATE'], {}), "('EDGE', [[edge, properties]], write_service_pb2.UPDATE)\n", (2893, 2949), False, 'from graphscope.framework.record import to_write_requests_pb\n'), ((3172, 3261), 'graphscope.framework.record.to_write_requests_pb', 'to_write_requests_pb', (['"""EDGE"""', '[[pk, {}] for pk in edge_pks]', 'write_service_pb2.DELETE'], {}), "('EDGE', [[pk, {}] for pk in edge_pks],\n write_service_pb2.DELETE)\n", (3192, 3261), False, 'from graphscope.framework.record import to_write_requests_pb\n'), ((3528, 3555), 'grpc.insecure_channel', 'grpc.insecure_channel', (['addr'], {}), '(addr)\n', (3549, 3555), False, 'import grpc\n'), ((3589, 3632), 'graphscope.proto.ddl_service_pb2_grpc.ClientDdlStub', 'ddl_service_pb2_grpc.ClientDdlStub', (['channel'], {}), '(channel)\n', (3623, 3632), False, 'from graphscope.proto import ddl_service_pb2_grpc\n'), ((3668, 3715), 'graphscope.proto.write_service_pb2_grpc.ClientWriteStub', 'write_service_pb2_grpc.ClientWriteStub', (['channel'], {}), '(channel)\n', (3706, 3715), False, 'from graphscope.proto import write_service_pb2_grpc\n'), ((4192, 4228), 'graphscope.proto.ddl_service_pb2.GetGraphDefRequest', 'ddl_service_pb2.GetGraphDefRequest', ([], {}), '()\n', (4226, 4228), False, 'from graphscope.proto import ddl_service_pb2\n'), ((5120, 5158), 'graphscope.proto.write_service_pb2.RemoteFlushRequest', 'write_service_pb2.RemoteFlushRequest', ([], {}), '()\n', (5156, 5158), False, 'from graphscope.proto import write_service_pb2\n'), ((4490, 4528), 'gremlin_python.driver.driver_remote_connection.DriverRemoteConnection', 'DriverRemoteConnection', (['graph_url', '"""g"""'], {}), "(graph_url, 'g')\n", (4512, 4528), False, 'from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection\n'), ((4668, 4706), 'graphscope.proto.write_service_pb2.GetClientIdRequest', 'write_service_pb2.GetClientIdRequest', ([], {}), '()\n', (4704, 4706), False, 'from graphscope.proto import write_service_pb2\n'), ((4544, 
4555), 'gremlin_python.process.anonymous_traversal.traversal', 'traversal', ([], {}), '()\n', (4553, 4555), False, 'from gremlin_python.process.anonymous_traversal import traversal\n')]
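Putting the pieces above together, a client obtains a Connection via conn(), fetches the Graph wrapper with g(), and issues batched writes followed by remote_flush() to make them visible. The sketch below is illustrative only: the endpoints, the "person"/"knows" labels and their properties are invented, and VertexRecordKey/EdgeRecordKey are assumed to take a label plus key values, as suggested by their use in the Graph methods.

# Hypothetical endpoints and labels; only the call pattern mirrors the classes above.
from graphscope.framework.record import EdgeRecordKey, VertexRecordKey

connection = conn("127.0.0.1:55556", gremlin_endpoint="127.0.0.1:12312")
graph = connection.g()

# assumed signature: VertexRecordKey(label, primary_key_dict)
alice = VertexRecordKey("person", {"id": 1})
bob = VertexRecordKey("person", {"id": 2})
snapshot_id = graph.insert_vertices([[alice, {"name": "alice"}], [bob, {"name": "bob"}]])
connection.remote_flush(snapshot_id)  # block until the written snapshot is queryable

# assumed signature: EdgeRecordKey(label, src_vertex_key, dst_vertex_key)
graph.insert_edge(EdgeRecordKey("knows", alice, bob), {"since": 2020})

g = connection.gremlin()          # gremlin traversal source over the same store
print(g.V().count().toList())
connection.close()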
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import base64 import json import logging import os import sys import graphscope.learning.graphlearn as gl logger = logging.getLogger("graphscope") def decode_arg(arg): if isinstance(arg, dict): return arg return json.loads(base64.b64decode(arg.encode("utf-8")).decode("utf-8")) def launch_server(handle, config, server_index): logger.info("server = %s", handle["server"]) logger.info("handle = %s", handle) logger.info("config = %s", config) g = gl.Graph().vineyard(handle, config["nodes"], config["edges"]) for label, node_attr in config["node_attributes"].items(): n_ints, n_floats, n_strings = ( node_attr[1][0], node_attr[1][1], node_attr[1][2], ) g.node_attributes(label, node_attr[0], n_ints, n_floats, n_strings) for label, edge_attr in config["edge_attributes"].items(): n_ints, n_floats, n_strings = ( edge_attr[1][0], edge_attr[1][1], edge_attr[1][2], ) g.edge_attributes(label, edge_attr[0], n_ints, n_floats, n_strings) for node_view_label, node_label, nsplit, split_range in config["gen_labels"]: g.node_view(node_view_label, node_label, nsplit=nsplit, split_range=split_range) # we guess the "worker_count" doesn't matter in the server side. g = g.init_vineyard(server_index=server_index, worker_count=0) g.close() if __name__ == "__main__": if len(sys.argv) < 3: print("Usage: ./learning.py <handle> <config> <server_index>", file=sys.stderr) sys.exit(-1) handle = decode_arg(sys.argv[1]) config = decode_arg(sys.argv[2]) server_index = int(sys.argv[3]) launch_server(handle, config, server_index)
[ "graphscope.learning.graphlearn.Graph", "sys.exit", "logging.getLogger" ]
[((784, 815), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (801, 815), False, 'import logging\n'), ((2240, 2252), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2248, 2252), False, 'import sys\n'), ((1151, 1161), 'graphscope.learning.graphlearn.Graph', 'gl.Graph', ([], {}), '()\n', (1159, 1161), True, 'import graphscope.learning.graphlearn as gl\n')]
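decode_arg() above accepts either a dict or a base64-encoded JSON string, so a parent process can serialize the handle and config before spawning the script. Below is a small sketch of the matching encoding step; the config keys are just the ones launch_server() reads, filled with empty placeholder values.

# Encode an argument the same way decode_arg() will decode it (base64 of JSON).
import base64
import json

config = {
    "nodes": [],             # placeholder: node source definitions
    "edges": [],             # placeholder: edge source definitions
    "node_attributes": {},
    "edge_attributes": {},
    "gen_labels": [],
}
encoded = base64.b64encode(json.dumps(config).encode("utf-8")).decode("utf-8")
# pass `encoded` as <config>: python3 learning.py <handle> <config> <server_index>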
import networkx.algorithms.tests.test_dag import pytest from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx(networkx.algorithms.tests.test_dag, decorators=pytest.mark.usefixtures("graphscope_session")) from networkx.algorithms.tests.test_dag import TestDAG from networkx.algorithms.tests.test_dag import TestDagLongestPath @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestDagLongestPath) class TestDagLongestPath: @pytest.mark.skip(reason="not support class object as node") def test_unorderable_nodes(self): pass @pytest.mark.skip(reason="not support multigraph") def test_multigraph(self): pass @pytest.mark.skip(reason="not support multigraph") def test_multidigraph(self): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestDAG) class TestDAG: @pytest.mark.skip(reason="not support multigraph") def test_all_topological_sorts_3(self): pass @pytest.mark.skip(reason="not support multigraph") def test_all_topological_sorts_multigraph_1(self): pass @pytest.mark.skip(reason="not support multigraph") def test_all_topological_sorts_multigraph_2(self): pass @pytest.mark.skip(reason="not support class object as node") def test_lexicographical_topological_sort2(self): pass @pytest.mark.skip(reason="not support None object as attribute") class TestDagToBranching: pass
[ "graphscope.experimental.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((480, 525), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (503, 525), False, 'import pytest\n'), ((527, 573), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestDagLongestPath'], {}), '(TestDagLongestPath)\n', (553, 573), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((921, 966), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (944, 966), False, 'import pytest\n'), ((968, 1003), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestDAG'], {}), '(TestDAG)\n', (994, 1003), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1515, 1578), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support None object as attribute"""'}), "(reason='not support None object as attribute')\n", (1531, 1578), False, 'import pytest\n'), ((605, 664), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support class object as node"""'}), "(reason='not support class object as node')\n", (621, 664), False, 'import pytest\n'), ((722, 771), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (738, 771), False, 'import pytest\n'), ((822, 871), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (838, 871), False, 'import pytest\n'), ((1024, 1073), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1040, 1073), False, 'import pytest\n'), ((1137, 1186), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1153, 1186), False, 'import pytest\n'), ((1261, 1310), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1277, 1310), False, 'import pytest\n'), ((1385, 1444), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support class object as node"""'}), "(reason='not support class object as node')\n", (1401, 1444), False, 'import pytest\n'), ((308, 353), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (331, 353), False, 'import pytest\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import os import signal import subprocess import sys import graphscope from graphscope.config import GSConfig as gs_config from graphscope.deploy.launcher import Launcher from graphscope.framework.utils import PipeWatcher from graphscope.framework.utils import get_free_port from graphscope.framework.utils import in_notebook from graphscope.framework.utils import is_free_port from graphscope.framework.utils import random_string try: import gscoordinator COORDINATOR_HOME = os.path.abspath(os.path.join(gscoordinator.__file__, "..", "..")) except ModuleNotFoundError: # If gscoordinator is not installed, try to locate it by relative path, # which is strong related with the directory structure of GraphScope COORDINATOR_HOME = os.path.abspath( os.path.join(__file__, "..", "..", "..", "..", "..", "coordinator") ) logger = logging.getLogger("graphscope") class HostsClusterLauncher(Launcher): """Class for setting up GraphScope instance on hosts cluster""" def __init__( self, hosts=None, port=None, num_workers=None, vineyard_socket=None, timeout_seconds=None, vineyard_shared_mem=None, **kwargs ): self._hosts = hosts self._port = port self._num_workers = num_workers self._vineyard_socket = vineyard_socket self._timeout_seconds = timeout_seconds self._vineyard_shared_mem = vineyard_shared_mem self._instance_id = random_string(6) self._proc = None self._closed = True def poll(self): if self._proc is not None: return self._proc.poll() return -1 def _launch_coordinator(self): if self._port is None: self._port = get_free_port() else: # check port conflict if not is_free_port(self._port): raise RuntimeError("Port {} already used.".format(self._port)) self._coordinator_endpoint = "{}:{}".format(self._hosts[0], self._port) cmd = [ sys.executable, "-m", "gscoordinator", "--num_workers", "{}".format(str(self._num_workers)), "--hosts", "{}".format(",".join(self._hosts)), "--log_level", "{}".format(gs_config.log_level), "--timeout_seconds", "{}".format(self._timeout_seconds), "--port", "{}".format(str(self._port)), "--cluster_type", self.type(), "--instance_id", self._instance_id, ] if self._vineyard_shared_mem is not None: cmd.extend(["--vineyard_shared_mem", self._vineyard_shared_mem]) if self._vineyard_socket: cmd.extend(["--vineyard_socket", "{}".format(self._vineyard_socket)]) logger.info("Initializing coordinator with command: %s", " ".join(cmd)) env = os.environ.copy() env["PYTHONUNBUFFERED"] = "TRUE" # add graphscope module to PYTHONPATH if "PYTHONPATH" in env: env["PYTHONPATH"] = ( os.path.join(os.path.dirname(graphscope.__file__), "..") + os.pathsep + env["PYTHONPATH"] ) else: env["PYTHONPATH"] = os.path.join(os.path.dirname(graphscope.__file__), "..") # Param `start_new_session=True` is for putting child process to a new process group # so it won't get the signals from parent. 
# In notebook environment, we need to accept the signal from kernel restarted/stoped. process = subprocess.Popen( cmd, start_new_session=False if in_notebook() else True, cwd=COORDINATOR_HOME, env=env, encoding="utf-8", errors="replace", stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, bufsize=1, ) stdout_watcher = PipeWatcher(process.stdout, sys.stdout) if not gs_config.show_log: stdout_watcher.add_filter( lambda line: "Loading" in line and "it/s]" in line ) setattr(process, "stdout_watcher", stdout_watcher) stderr_watcher = PipeWatcher(process.stderr, sys.stderr) setattr(process, "stderr_watcher", stderr_watcher) self._proc = process def type(self): return "hosts" def start(self): """Launch graphscope instance on hosts cluster. Raises: RuntimeError: If instance launch failed or timeout. Returns: tuple of process and endpoint """ try: self._launch_coordinator() self._closed = False logger.info( "Coordinator service started successful, connecting to service..." ) except Exception as e: self.stop() raise RuntimeError( "Error when launching coordinator on hosts cluster" ) from e def stop(self): """Stop GraphScope instance.""" # coordinator's GRPCServer.wait_for_termination works for SIGINT (Ctrl-C) if not self._closed: if self._proc is not None: self._proc.send_signal(signal.SIGINT) self._proc.wait(timeout=10) self._proc = None self._closed = True
[ "os.path.join", "logging.getLogger", "os.path.dirname", "graphscope.framework.utils.get_free_port", "os.environ.copy", "graphscope.framework.utils.is_free_port", "graphscope.framework.utils.PipeWatcher", "graphscope.framework.utils.in_notebook", "graphscope.framework.utils.random_string" ]
[((1545, 1576), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1562, 1576), False, 'import logging\n'), ((1186, 1234), 'os.path.join', 'os.path.join', (['gscoordinator.__file__', '""".."""', '""".."""'], {}), "(gscoordinator.__file__, '..', '..')\n", (1198, 1234), False, 'import os\n'), ((2176, 2192), 'graphscope.framework.utils.random_string', 'random_string', (['(6)'], {}), '(6)\n', (2189, 2192), False, 'from graphscope.framework.utils import random_string\n'), ((3644, 3661), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (3659, 3661), False, 'import os\n'), ((4746, 4785), 'graphscope.framework.utils.PipeWatcher', 'PipeWatcher', (['process.stdout', 'sys.stdout'], {}), '(process.stdout, sys.stdout)\n', (4757, 4785), False, 'from graphscope.framework.utils import PipeWatcher\n'), ((5025, 5064), 'graphscope.framework.utils.PipeWatcher', 'PipeWatcher', (['process.stderr', 'sys.stderr'], {}), '(process.stderr, sys.stderr)\n', (5036, 5064), False, 'from graphscope.framework.utils import PipeWatcher\n'), ((1461, 1528), 'os.path.join', 'os.path.join', (['__file__', '""".."""', '""".."""', '""".."""', '""".."""', '""".."""', '"""coordinator"""'], {}), "(__file__, '..', '..', '..', '..', '..', 'coordinator')\n", (1473, 1528), False, 'import os\n'), ((2450, 2465), 'graphscope.framework.utils.get_free_port', 'get_free_port', ([], {}), '()\n', (2463, 2465), False, 'from graphscope.framework.utils import get_free_port\n'), ((2533, 2557), 'graphscope.framework.utils.is_free_port', 'is_free_port', (['self._port'], {}), '(self._port)\n', (2545, 2557), False, 'from graphscope.framework.utils import is_free_port\n'), ((4026, 4062), 'os.path.dirname', 'os.path.dirname', (['graphscope.__file__'], {}), '(graphscope.__file__)\n', (4041, 4062), False, 'import os\n'), ((4401, 4414), 'graphscope.framework.utils.in_notebook', 'in_notebook', ([], {}), '()\n', (4412, 4414), False, 'from graphscope.framework.utils import in_notebook\n'), ((3844, 3880), 'os.path.dirname', 'os.path.dirname', (['graphscope.__file__'], {}), '(graphscope.__file__)\n', (3859, 3880), False, 'import os\n')]
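A sketch of driving HostsClusterLauncher directly follows; the hostnames, worker count and vineyard shared-memory size are placeholders, and in practice the class is normally constructed on the caller's behalf when a GraphScope session is launched on hosts.

# Placeholder values throughout; start() spawns the coordinator as a subprocess.
launcher = HostsClusterLauncher(
    hosts=["localhost"],
    port=None,                  # None lets _launch_coordinator pick a free port
    num_workers=2,
    vineyard_socket=None,
    timeout_seconds=600,
    vineyard_shared_mem="4Gi",
)
launcher.start()
print(launcher._coordinator_endpoint)   # "host:port" assembled in _launch_coordinator
assert launcher.poll() is None          # coordinator process still running
launcher.stop()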
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import io import os import pytest from networkx.readwrite.tests.test_adjlist import TestAdjlist from networkx.readwrite.tests.test_adjlist import TestMultilineAdjlist import graphscope.nx as nx from graphscope.nx.tests.utils import assert_edges_equal from graphscope.nx.tests.utils import assert_graphs_equal from graphscope.nx.tests.utils import assert_nodes_equal from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestAdjlist) class TestAdjlist: def test_parse_adjlist(self): lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] nx.parse_adjlist(lines, nodetype=int) # smoke test with pytest.raises(TypeError): nx.parse_adjlist(lines, nodetype="int") lines = ["1 2 5", "2 b", "c"] with pytest.raises(ValueError): nx.parse_adjlist(lines, nodetype=int) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestMultilineAdjlist) class TestMultilineAdjlist: pass
[ "pytest.raises", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures", "graphscope.nx.parse_adjlist" ]
[((837, 882), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (860, 882), False, 'import pytest\n'), ((884, 923), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestAdjlist'], {}), '(TestAdjlist)\n', (910, 923), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1311, 1356), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1334, 1356), False, 'import pytest\n'), ((1358, 1406), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestMultilineAdjlist'], {}), '(TestMultilineAdjlist)\n', (1384, 1406), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1037, 1074), 'graphscope.nx.parse_adjlist', 'nx.parse_adjlist', (['lines'], {'nodetype': 'int'}), '(lines, nodetype=int)\n', (1053, 1074), True, 'import graphscope.nx as nx\n'), ((1102, 1126), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1115, 1126), False, 'import pytest\n'), ((1140, 1179), 'graphscope.nx.parse_adjlist', 'nx.parse_adjlist', (['lines'], {'nodetype': '"""int"""'}), "(lines, nodetype='int')\n", (1156, 1179), True, 'import graphscope.nx as nx\n'), ((1231, 1256), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1244, 1256), False, 'import pytest\n'), ((1270, 1307), 'graphscope.nx.parse_adjlist', 'nx.parse_adjlist', (['lines'], {'nodetype': 'int'}), '(lines, nodetype=int)\n', (1286, 1307), True, 'import graphscope.nx as nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pytest from networkx.tests.test_convert import TestConvert as _TestConvert from networkx.utils import edges_equal from networkx.utils import nodes_equal import graphscope.nx as nx from graphscope.nx.convert import from_dict_of_dicts from graphscope.nx.convert import from_dict_of_lists from graphscope.nx.convert import to_dict_of_dicts from graphscope.nx.convert import to_dict_of_lists from graphscope.nx.convert import to_networkx_graph from graphscope.nx.generators.classic import barbell_graph from graphscope.nx.generators.classic import cycle_graph from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(_TestConvert) class TestConvert: def test_attribute_dict_integrity(self): # we must not replace dict-like graph data structures with dicts G = nx.Graph() G.add_nodes_from("abc") H = to_networkx_graph(G, create_using=nx.Graph) assert sorted(list(H.nodes)) == sorted(list(G.nodes)) H = nx.Graph(G) assert sorted(list(H.nodes)) == sorted(list(G.nodes)) def test_graph(self): g = nx.cycle_graph(10) G = nx.Graph() G.add_nodes_from(g) G.add_weighted_edges_from((u, v, u) for u, v in g.edges()) # Dict of dicts dod = to_dict_of_dicts(G) GG = from_dict_of_dicts(dod, create_using=nx.Graph) assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) assert edges_equal(sorted(G.edges()), sorted(GG.edges())) GW = to_networkx_graph(dod, create_using=nx.Graph) assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) assert edges_equal(sorted(G.edges()), sorted(GW.edges())) GI = nx.Graph(dod) assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes())) assert edges_equal(sorted(G.edges()), sorted(GI.edges())) # Dict of lists dol = to_dict_of_lists(G) GG = from_dict_of_lists(dol, create_using=nx.Graph) # dict of lists throws away edge data so set it to none enone = [(u, v, {}) for (u, v, d) in G.edges(data=True)] assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) assert edges_equal(enone, sorted(GG.edges(data=True))) GW = to_networkx_graph(dol, create_using=nx.Graph) assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) assert edges_equal(enone, sorted(GW.edges(data=True))) GI = nx.Graph(dol) assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes())) assert edges_equal(enone, sorted(GI.edges(data=True))) def test_custom_node_attr_dict_safekeeping(self): pass
[ "graphscope.nx.convert.from_dict_of_lists", "graphscope.nx.utils.compat.with_graphscope_nx_context", "graphscope.nx.Graph", "graphscope.nx.convert.from_dict_of_dicts", "graphscope.nx.cycle_graph", "pytest.mark.usefixtures", "graphscope.nx.convert.to_dict_of_lists", "graphscope.nx.convert.to_dict_of_dicts", "graphscope.nx.convert.to_networkx_graph" ]
[((1033, 1078), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1056, 1078), False, 'import pytest\n'), ((1080, 1120), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_TestConvert'], {}), '(_TestConvert)\n', (1106, 1120), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1270, 1280), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1278, 1280), True, 'import graphscope.nx as nx\n'), ((1325, 1368), 'graphscope.nx.convert.to_networkx_graph', 'to_networkx_graph', (['G'], {'create_using': 'nx.Graph'}), '(G, create_using=nx.Graph)\n', (1342, 1368), False, 'from graphscope.nx.convert import to_networkx_graph\n'), ((1443, 1454), 'graphscope.nx.Graph', 'nx.Graph', (['G'], {}), '(G)\n', (1451, 1454), True, 'import graphscope.nx as nx\n'), ((1556, 1574), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(10)'], {}), '(10)\n', (1570, 1574), True, 'import graphscope.nx as nx\n'), ((1587, 1597), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1595, 1597), True, 'import graphscope.nx as nx\n'), ((1732, 1751), 'graphscope.nx.convert.to_dict_of_dicts', 'to_dict_of_dicts', (['G'], {}), '(G)\n', (1748, 1751), False, 'from graphscope.nx.convert import to_dict_of_dicts\n'), ((1765, 1811), 'graphscope.nx.convert.from_dict_of_dicts', 'from_dict_of_dicts', (['dod'], {'create_using': 'nx.Graph'}), '(dod, create_using=nx.Graph)\n', (1783, 1811), False, 'from graphscope.nx.convert import from_dict_of_dicts\n'), ((1957, 2002), 'graphscope.nx.convert.to_networkx_graph', 'to_networkx_graph', (['dod'], {'create_using': 'nx.Graph'}), '(dod, create_using=nx.Graph)\n', (1974, 2002), False, 'from graphscope.nx.convert import to_networkx_graph\n'), ((2148, 2161), 'graphscope.nx.Graph', 'nx.Graph', (['dod'], {}), '(dod)\n', (2156, 2161), True, 'import graphscope.nx as nx\n'), ((2333, 2352), 'graphscope.nx.convert.to_dict_of_lists', 'to_dict_of_lists', (['G'], {}), '(G)\n', (2349, 2352), False, 'from graphscope.nx.convert import to_dict_of_lists\n'), ((2366, 2412), 'graphscope.nx.convert.from_dict_of_lists', 'from_dict_of_lists', (['dol'], {'create_using': 'nx.Graph'}), '(dol, create_using=nx.Graph)\n', (2384, 2412), False, 'from graphscope.nx.convert import from_dict_of_lists\n'), ((2684, 2729), 'graphscope.nx.convert.to_networkx_graph', 'to_networkx_graph', (['dol'], {'create_using': 'nx.Graph'}), '(dol, create_using=nx.Graph)\n', (2701, 2729), False, 'from graphscope.nx.convert import to_networkx_graph\n'), ((2872, 2885), 'graphscope.nx.Graph', 'nx.Graph', (['dol'], {}), '(dol)\n', (2880, 2885), True, 'import graphscope.nx as nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import os from pathlib import Path import numpy as np import pandas as pd import pytest from graphscope import JavaApp @pytest.fixture(scope="module") def not_exist_jar(): path = os.path.join("not_exist_dir", "not_exist.jar") return path @pytest.fixture(scope="module") def not_jar_file(): return os.path.expandvars("${GS_TEST_DIR}/p2p-31.e") @pytest.fixture(scope="module") def a_gar_file(): return os.path.expandvars("${GS_TEST_DIR}/gars/sssp_pie.gar") @pytest.fixture(scope="module") def empty_jar(): return os.path.expandvars("${GS_TEST_DIR}/jars/empty.jar") @pytest.fixture(scope="module") def demo_jar(): return os.path.expandvars("${USER_JAR_PATH}") @pytest.fixture(scope="module") def projected_graph_sssp_class(): return "com.alibaba.graphscope.example.sssp.SSSP" @pytest.fixture(scope="module") def non_exist_java_class(): return "com.alibaba.graphscope.example.non.existing.java.class" @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_non_existing_jar( not_exist_jar, projected_graph_sssp_class, non_exist_java_class ): with pytest.raises(FileNotFoundError): sssp = JavaApp(not_exist_jar, projected_graph_sssp_class) with pytest.raises(FileNotFoundError): sssp = JavaApp(not_exist_jar, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_not_a_jar(not_jar_file, projected_graph_sssp_class, non_exist_java_class): with pytest.raises(KeyError): sssp = JavaApp(not_jar_file, projected_graph_sssp_class) with pytest.raises(KeyError): sssp = JavaApp(not_jar_file, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_gar_file(a_gar_file, projected_graph_sssp_class, non_exist_java_class): with pytest.raises(KeyError): sssp = JavaApp(a_gar_file, projected_graph_sssp_class) with pytest.raises(KeyError): sssp = JavaApp(a_gar_file, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_empty_jar(empty_jar, projected_graph_sssp_class, non_exist_java_class): with pytest.raises(KeyError): sssp = JavaApp(empty_jar, projected_graph_sssp_class) with pytest.raises(KeyError): sssp = JavaApp(empty_jar, non_exist_java_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_load_correct_jar(projected_graph_sssp_class, demo_jar): sssp = JavaApp(demo_jar, projected_graph_sssp_class) @pytest.mark.skipif( os.environ.get("RUN_JAVA_TESTS") != "ON", reason="Java SDK is disabled, skip this test.", ) def test_sssp_property_vertex_data( demo_jar, graphscope_session, p2p_project_directed_graph, projected_graph_sssp_class, ): sssp = 
JavaApp(full_jar_path=demo_jar, java_app_class=projected_graph_sssp_class) sssp(p2p_project_directed_graph, src=6, threadNum=1)
[ "os.path.join", "pytest.fixture", "graphscope.JavaApp", "pytest.raises", "os.path.expandvars", "os.environ.get" ]
[((806, 836), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (820, 836), False, 'import pytest\n'), ((935, 965), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (949, 965), False, 'import pytest\n'), ((1046, 1076), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1060, 1076), False, 'import pytest\n'), ((1164, 1194), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1178, 1194), False, 'import pytest\n'), ((1278, 1308), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1292, 1308), False, 'import pytest\n'), ((1378, 1408), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1392, 1408), False, 'import pytest\n'), ((1500, 1530), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1514, 1530), False, 'import pytest\n'), ((869, 915), 'os.path.join', 'os.path.join', (['"""not_exist_dir"""', '"""not_exist.jar"""'], {}), "('not_exist_dir', 'not_exist.jar')\n", (881, 915), False, 'import os\n'), ((997, 1042), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/p2p-31.e"""'], {}), "('${GS_TEST_DIR}/p2p-31.e')\n", (1015, 1042), False, 'import os\n'), ((1106, 1160), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/gars/sssp_pie.gar"""'], {}), "('${GS_TEST_DIR}/gars/sssp_pie.gar')\n", (1124, 1160), False, 'import os\n'), ((1223, 1274), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/jars/empty.jar"""'], {}), "('${GS_TEST_DIR}/jars/empty.jar')\n", (1241, 1274), False, 'import os\n'), ((1336, 1374), 'os.path.expandvars', 'os.path.expandvars', (['"""${USER_JAR_PATH}"""'], {}), "('${USER_JAR_PATH}')\n", (1354, 1374), False, 'import os\n'), ((3460, 3505), 'graphscope.JavaApp', 'JavaApp', (['demo_jar', 'projected_graph_sssp_class'], {}), '(demo_jar, projected_graph_sssp_class)\n', (3467, 3505), False, 'from graphscope import JavaApp\n'), ((3781, 3855), 'graphscope.JavaApp', 'JavaApp', ([], {'full_jar_path': 'demo_jar', 'java_app_class': 'projected_graph_sssp_class'}), '(full_jar_path=demo_jar, java_app_class=projected_graph_sssp_class)\n', (3788, 3855), False, 'from graphscope import JavaApp\n'), ((1862, 1894), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (1875, 1894), False, 'import pytest\n'), ((1911, 1961), 'graphscope.JavaApp', 'JavaApp', (['not_exist_jar', 'projected_graph_sssp_class'], {}), '(not_exist_jar, projected_graph_sssp_class)\n', (1918, 1961), False, 'from graphscope import JavaApp\n'), ((1971, 2003), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (1984, 2003), False, 'import pytest\n'), ((2020, 2064), 'graphscope.JavaApp', 'JavaApp', (['not_exist_jar', 'non_exist_java_class'], {}), '(not_exist_jar, non_exist_java_class)\n', (2027, 2064), False, 'from graphscope import JavaApp\n'), ((1654, 1686), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (1668, 1686), False, 'import os\n'), ((2286, 2309), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2299, 2309), False, 'import pytest\n'), ((2326, 2375), 'graphscope.JavaApp', 'JavaApp', (['not_jar_file', 'projected_graph_sssp_class'], {}), '(not_jar_file, projected_graph_sssp_class)\n', (2333, 2375), False, 'from graphscope import JavaApp\n'), ((2385, 2408), 
'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2398, 2408), False, 'import pytest\n'), ((2425, 2468), 'graphscope.JavaApp', 'JavaApp', (['not_jar_file', 'non_exist_java_class'], {}), '(not_jar_file, non_exist_java_class)\n', (2432, 2468), False, 'from graphscope import JavaApp\n'), ((2092, 2124), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (2106, 2124), False, 'import os\n'), ((2687, 2710), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2700, 2710), False, 'import pytest\n'), ((2727, 2774), 'graphscope.JavaApp', 'JavaApp', (['a_gar_file', 'projected_graph_sssp_class'], {}), '(a_gar_file, projected_graph_sssp_class)\n', (2734, 2774), False, 'from graphscope import JavaApp\n'), ((2784, 2807), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2797, 2807), False, 'import pytest\n'), ((2824, 2865), 'graphscope.JavaApp', 'JavaApp', (['a_gar_file', 'non_exist_java_class'], {}), '(a_gar_file, non_exist_java_class)\n', (2831, 2865), False, 'from graphscope import JavaApp\n'), ((2496, 2528), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (2510, 2528), False, 'import os\n'), ((3084, 3107), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (3097, 3107), False, 'import pytest\n'), ((3124, 3170), 'graphscope.JavaApp', 'JavaApp', (['empty_jar', 'projected_graph_sssp_class'], {}), '(empty_jar, projected_graph_sssp_class)\n', (3131, 3170), False, 'from graphscope import JavaApp\n'), ((3180, 3203), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (3193, 3203), False, 'import pytest\n'), ((3220, 3260), 'graphscope.JavaApp', 'JavaApp', (['empty_jar', 'non_exist_java_class'], {}), '(empty_jar, non_exist_java_class)\n', (3227, 3260), False, 'from graphscope import JavaApp\n'), ((2893, 2925), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (2907, 2925), False, 'import os\n'), ((3288, 3320), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (3302, 3320), False, 'import os\n'), ((3533, 3565), 'os.environ.get', 'os.environ.get', (['"""RUN_JAVA_TESTS"""'], {}), "('RUN_JAVA_TESTS')\n", (3547, 3565), False, 'import os\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import pandas as pd import pytest from graphscope import nx from graphscope.nx.tests.utils import almost_equal from graphscope.nx.utils.misc import replace_with_inf @pytest.mark.usefixtures("graphscope_session") class TestBuiltInApp: @classmethod def setup_class(cls): cls.grid_edges = [ (1, 2), (1, 5), (2, 3), (2, 6), (3, 4), (3, 7), (4, 8), (5, 6), (5, 9), (6, 7), (6, 10), (7, 8), (7, 11), (8, 12), (9, 10), (9, 13), (10, 11), (10, 14), (11, 12), (11, 15), (12, 16), (13, 14), (14, 15), (15, 16), ] cls.grid = nx.Graph() cls.grid.add_edges_from(cls.grid_edges, weight=1) cls.grid_ans = { 1: 0, 5: 1, 2: 1, 9: 2, 6: 2, 3: 2, 13: 3, 10: 3, 7: 3, 4: 3, 14: 4, 11: 4, 8: 4, 15: 5, 12: 5, 16: 6, } cls.grid_path_ans = { 3: 7, 4: 8, 8: 12, 12: 16, 1: 5, 5: 9, 9: 13, 2: 3, 6: 10, 10: 14, 7: 11, 11: 15, } data_dir = os.path.expandvars("${GS_TEST_DIR}") p2p_file = os.path.expandvars("${GS_TEST_DIR}/p2p-31.e") p2p_sub_file = os.path.expandvars( "${GS_TEST_DIR}/dynamic/p2p-31_dynamic_subgraph.edgelist" ) cls.p2p = nx.read_edgelist( p2p_file, nodetype=int, data=(("weight", int),), create_using=nx.DiGraph ) cls.p2p_undirected = nx.read_edgelist( p2p_file, nodetype=int, data=(("weight", int),), create_using=nx.Graph ) cls.p2p_subgraph = nx.read_edgelist( p2p_sub_file, nodetype=int, data=True, create_using=nx.DiGraph ) cls.p2p_subgraph_undirected = nx.read_edgelist( p2p_sub_file, nodetype=int, data=True, create_using=nx.Graph ) cls.p2p_length_ans = dict( pd.read_csv( "{}/p2p-31-sssp".format(data_dir), sep=" ", header=None, prefix="" ).values ) cls.p2p_dc_ans = dict( pd.read_csv( "{}/p2p-31-degree_centrality".format(data_dir), sep="\t", header=None, prefix="", ).values ) cls.p2p_ev_ans = dict( pd.read_csv( "{}/p2p-31-eigenvector".format(data_dir), sep=" ", header=None, prefix="", ).values ) cls.p2p_kz_ans = dict( pd.read_csv( "{}/p2p-31-katz".format(data_dir), sep="\t", header=None, prefix="" ).values ) cls.p2p_hits_ans = pd.read_csv( "{}/p2p-31-hits-directed".format(data_dir), sep="\t", header=None, prefix="" ) cls.p2p_pagerank_ans = dict( pd.read_csv( "{}/p2p-31-pagerank_nx".format(data_dir), sep="\t", header=None, prefix="", ).values ) cls.p2p_clus_ans = dict( pd.read_csv( "{}/p2p-31-clustering".format(data_dir), sep=" ", header=None, prefix="" ).values ) cls.p2p_triangles_ans = dict( pd.read_csv( "{}/p2p-31-triangles".format(data_dir), sep=" ", header=None, prefix="" ).values ) cls.p2p_kcore_ans = sorted( pd.read_csv( "{}/p2p-31-kcore".format(data_dir), sep=" ", header=None, prefix="" ).values ) cls.empty_pagerank_ans = {} def assert_result_almost_equal(self, r1, r2): assert len(r1) == len(r2) for k in r1.keys(): assert almost_equal(r1[k], r2[k]) def test_single_source_dijkstra_path_length(self): ret = nx.builtin.single_source_dijkstra_path_length( self.grid, 1, weight="weight" ) assert ret == self.grid_ans ret = 
nx.builtin.single_source_dijkstra_path_length( self.p2p_undirected, 6, weight="weight" ) assert replace_with_inf(ret) == self.p2p_length_ans @pytest.mark.skip(reason="TODO: subgraph not ready") def test_subgraph_single_source_dijkstra_path_length(self): # test subgraph and edge_subgraph with p2p_subgraph_undirected ret = nx.builtin.single_source_dijkstra_path_length( self.p2p_subgraph_undirected, 6, weight="weight" ) SG = self.p2p_undirected.subgraph(self.p2p_subgraph_undirected.nodes) ret_sg = nx.builtin.single_source_dijkstra_path_length(SG, 6, weight="weight") assert ret == ret_sg ESG = self.p2p_undirected.edge_subgraph(self.p2p_subgraph_undirected.edges) ret_esg = nx.builtin.single_source_dijkstra_path_length(ESG, 6, weight="weight") assert ret == ret_esg # test subgraph and edge_subgraph with p2p directed ret2 = nx.builtin.single_source_dijkstra_path_length( self.p2p_subgraph, 6, weight="weight" ) SDG = self.p2p.subgraph(self.p2p_subgraph.nodes) ret_sdg = nx.builtin.single_source_dijkstra_path_length(SDG, 6, weight="weight") assert ret2 == ret_sdg ESDG = self.p2p.edge_subgraph(self.p2p_subgraph.edges) ret_esdg = nx.builtin.single_source_dijkstra_path_length( ESDG, 6, weight="weight" ) assert ret2 == ret_esdg def test_shortest_path(self): ctx1 = nx.builtin.shortest_path(self.grid, source=1, weight="weight") ret1 = dict(ctx1.to_numpy("r")) if os.environ.get("DEPLOYMENT", None) == "standalone": # the successor of 2 is 6 on standalone self.grid_path_ans[2] = 6 assert ret1 == self.grid_path_ans def test_has_path(self): assert nx.builtin.has_path(self.grid, source=1, target=6) assert not nx.builtin.has_path(self.p2p, source=6, target=3728) assert nx.builtin.has_path(self.p2p, source=6, target=3723) def test_average_shortest_path_length(self): ret = nx.builtin.average_shortest_path_length(self.grid, weight="weight") assert ret == 2.6666666666666665 def test_degree_centrality(self): ans = nx.builtin.degree_centrality(self.p2p) self.assert_result_almost_equal(ans, self.p2p_dc_ans) def test_eigenvector_centrality(self): ans = nx.builtin.eigenvector_centrality(self.p2p, weight="weight") self.assert_result_almost_equal(ans, self.p2p_ev_ans) def test_katz_centrality(self): ans = nx.builtin.katz_centrality(self.p2p) self.assert_result_almost_equal(ans, self.p2p_kz_ans) def test_hits(self): expected_hub = dict( zip( self.p2p_hits_ans[0].to_numpy(dtype=int), self.p2p_hits_ans[1].to_numpy(dtype=float), ) ) expected_auth = dict( zip( self.p2p_hits_ans[0].to_numpy(dtype=int), self.p2p_hits_ans[2].to_numpy(dtype=float), ) ) hub, auth = nx.builtin.hits(self.p2p, tol=0.001) self.assert_result_almost_equal(hub, expected_hub) self.assert_result_almost_equal(auth, expected_auth) def test_clustering(self): ans = nx.builtin.clustering(self.p2p) self.assert_result_almost_equal(ans, self.p2p_clus_ans) def test_triangles(self): ans = nx.builtin.triangles(self.p2p_undirected) self.assert_result_almost_equal(ans, self.p2p_triangles_ans) def test_average_clustering(self): ret = nx.builtin.average_clustering(self.p2p_undirected) def test_weakly_connected_components(self): ret = nx.builtin.weakly_connected_components(self.p2p_undirected) def test_pagerank(self): ans = nx.builtin.pagerank(self.p2p) self.assert_result_almost_equal(ans, self.p2p_pagerank_ans) def test_degree_assortativity_coefficient(self): ans = nx.builtin.degree_assortativity_coefficient(self.p2p_undirected) assert almost_equal(ans, -0.0925578, places=5) def test_average_degree_connectivity(self): gt = { 23: 12.079051383399209, 36: 12.213541666666666, 
6: 13.41948833709556, 13: 10.631225667874883, 15: 11.13222089789432, 31: 12.525345622119815, 18: 11.670812603648425, 22: 12.205342902711324, 20: 12.039447236180905, 7: 13.233418367346939, 3: 13.822278481012658, 2: 14.043461910390029, 17: 11.186960563006242, 8: 13.315128504672897, 5: 13.480509745127437, 1: 14.27883608959598, 4: 13.57874109263658, 27: 12.93676603432701, 12: 10.081949352179034, 11: 9.594664409134323, 16: 11.09459175084175, 21: 12.016680118375033, 19: 11.686204146730462, 25: 12.745416666666667, 30: 12.973563218390805, 42: 10.657738095238095, 28: 12.34593837535014, 10: 9.59122965641953, 14: 10.633949982870845, 24: 12.37938596491228, 26: 12.934065934065934, 33: 10.980392156862745, 39: 10.663003663003662, 32: 11.841911764705882, 9: 12.373635600335852, 35: 10.806722689075631, 34: 11.237851662404092, 45: 10.526984126984127, 29: 12.315958299919808, 46: 10.108695652173912, 37: 10.373464373464374, 70: 13.642857142857142, 40: 9.74375, 41: 11.207317073170731, 38: 10.493927125506072, 43: 13.023255813953488, 44: 10.655844155844155, 55: 7.472727272727273, 49: 9.755102040816327, 95: 9.863157894736842, 47: 9.063829787234043, 78: 10.205128205128204, 62: 9.274193548387096, 66: 10.378787878787879, 48: 9.552083333333334, 51: 9.882352941176471, } ans = nx.builtin.average_degree_connectivity(self.p2p_undirected) assert gt == ans @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="FIXME(acezen): DynamicFragment not store edges of outer vertex.", ) def test_voterank(self): gt = [ 9788, 17325, 585, 50445, 28802, 2550, 61511, 5928, 29965, 38767, 57802, 52032, 44619, 13596, 59426, 454, 58170, 3544, 364, 5530, ] ans = nx.builtin.voterank(self.p2p_undirected, 20) assert gt == ans gt = [9788, 17325, 50445, 28802, 61511, 57802, 52032, 29965] ans = nx.builtin.voterank(self.p2p, 8) assert gt == ans @pytest.mark.skip(reason="TODO: the app not compatible with DynamicFragment") def test_all_simple_paths(self): ans = nx.builtin.all_simple_paths(self.p2p, 1, 4, cutoff=10) assert len(ans) == 1022 ans = nx.builtin.all_simple_paths(self.p2p_undirected, 1, [4, 6], cutoff=5) assert len(ans) == 1675 def test_pagerank_on_empty(self): eg = nx.null_graph() ans = nx.builtin.pagerank(eg) self.assert_result_almost_equal(ans, self.empty_pagerank_ans)
[ "graphscope.nx.tests.utils.almost_equal", "graphscope.nx.builtin.has_path", "graphscope.nx.builtin.eigenvector_centrality", "graphscope.nx.builtin.katz_centrality", "pytest.mark.usefixtures", "graphscope.nx.read_edgelist", "os.environ.get", "pytest.mark.skip", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.builtin.pagerank", "graphscope.nx.builtin.average_shortest_path_length", "graphscope.nx.null_graph", "graphscope.nx.builtin.clustering", "graphscope.nx.builtin.degree_centrality", "graphscope.nx.builtin.shortest_path", "os.path.expandvars", "graphscope.nx.builtin.weakly_connected_components", "graphscope.nx.builtin.triangles", "graphscope.nx.builtin.average_degree_connectivity", "graphscope.nx.builtin.voterank", "graphscope.nx.builtin.degree_assortativity_coefficient", "graphscope.nx.Graph", "graphscope.nx.utils.misc.replace_with_inf", "graphscope.nx.builtin.all_simple_paths", "graphscope.nx.builtin.average_clustering", "graphscope.nx.builtin.hits" ]
[((847, 892), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (870, 892), False, 'import pytest\n'), ((5267, 5318), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""TODO: subgraph not ready"""'}), "(reason='TODO: subgraph not ready')\n", (5283, 5318), False, 'import pytest\n'), ((12208, 12284), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""TODO: the app not compatible with DynamicFragment"""'}), "(reason='TODO: the app not compatible with DynamicFragment')\n", (12224, 12284), False, 'import pytest\n'), ((1515, 1525), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1523, 1525), False, 'from graphscope import nx\n'), ((2200, 2236), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}"""'], {}), "('${GS_TEST_DIR}')\n", (2218, 2236), False, 'import os\n'), ((2256, 2301), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/p2p-31.e"""'], {}), "('${GS_TEST_DIR}/p2p-31.e')\n", (2274, 2301), False, 'import os\n'), ((2325, 2402), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/dynamic/p2p-31_dynamic_subgraph.edgelist"""'], {}), "('${GS_TEST_DIR}/dynamic/p2p-31_dynamic_subgraph.edgelist')\n", (2343, 2402), False, 'import os\n'), ((2443, 2537), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['p2p_file'], {'nodetype': 'int', 'data': "(('weight', int),)", 'create_using': 'nx.DiGraph'}), "(p2p_file, nodetype=int, data=(('weight', int),),\n create_using=nx.DiGraph)\n", (2459, 2537), False, 'from graphscope import nx\n'), ((2585, 2677), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['p2p_file'], {'nodetype': 'int', 'data': "(('weight', int),)", 'create_using': 'nx.Graph'}), "(p2p_file, nodetype=int, data=(('weight', int),),\n create_using=nx.Graph)\n", (2601, 2677), False, 'from graphscope import nx\n'), ((2723, 2808), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['p2p_sub_file'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'nx.DiGraph'}), '(p2p_sub_file, nodetype=int, data=True, create_using=nx.DiGraph\n )\n', (2739, 2808), False, 'from graphscope import nx\n'), ((2864, 2942), 'graphscope.nx.read_edgelist', 'nx.read_edgelist', (['p2p_sub_file'], {'nodetype': 'int', 'data': '(True)', 'create_using': 'nx.Graph'}), '(p2p_sub_file, nodetype=int, data=True, create_using=nx.Graph)\n', (2880, 2942), False, 'from graphscope import nx\n'), ((4942, 5018), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.grid', '(1)'], {'weight': '"""weight"""'}), "(self.grid, 1, weight='weight')\n", (4987, 5018), False, 'from graphscope import nx\n'), ((5092, 5182), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.p2p_undirected', '(6)'], {'weight': '"""weight"""'}), "(self.p2p_undirected, 6,\n weight='weight')\n", (5137, 5182), False, 'from graphscope import nx\n'), ((5468, 5567), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.p2p_subgraph_undirected', '(6)'], {'weight': '"""weight"""'}), "(self.p2p_subgraph_undirected,\n 6, weight='weight')\n", (5513, 5567), False, 'from graphscope import nx\n'), ((5681, 5750), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['SG', '(6)'], {'weight': '"""weight"""'}), "(SG, 6, weight='weight')\n", (5726, 5750), False, 'from graphscope import nx\n'), ((5882, 5952), 
'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['ESG', '(6)'], {'weight': '"""weight"""'}), "(ESG, 6, weight='weight')\n", (5927, 5952), False, 'from graphscope import nx\n'), ((6059, 6148), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.p2p_subgraph', '(6)'], {'weight': '"""weight"""'}), "(self.p2p_subgraph, 6, weight=\n 'weight')\n", (6104, 6148), False, 'from graphscope import nx\n'), ((6241, 6311), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['SDG', '(6)'], {'weight': '"""weight"""'}), "(SDG, 6, weight='weight')\n", (6286, 6311), False, 'from graphscope import nx\n'), ((6425, 6496), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['ESDG', '(6)'], {'weight': '"""weight"""'}), "(ESDG, 6, weight='weight')\n", (6470, 6496), False, 'from graphscope import nx\n'), ((6601, 6663), 'graphscope.nx.builtin.shortest_path', 'nx.builtin.shortest_path', (['self.grid'], {'source': '(1)', 'weight': '"""weight"""'}), "(self.grid, source=1, weight='weight')\n", (6625, 6663), False, 'from graphscope import nx\n'), ((6944, 6994), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.grid'], {'source': '(1)', 'target': '(6)'}), '(self.grid, source=1, target=6)\n', (6963, 6994), False, 'from graphscope import nx\n'), ((7082, 7134), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.p2p'], {'source': '(6)', 'target': '(3723)'}), '(self.p2p, source=6, target=3723)\n', (7101, 7134), False, 'from graphscope import nx\n'), ((7199, 7266), 'graphscope.nx.builtin.average_shortest_path_length', 'nx.builtin.average_shortest_path_length', (['self.grid'], {'weight': '"""weight"""'}), "(self.grid, weight='weight')\n", (7238, 7266), False, 'from graphscope import nx\n'), ((7361, 7399), 'graphscope.nx.builtin.degree_centrality', 'nx.builtin.degree_centrality', (['self.p2p'], {}), '(self.p2p)\n', (7389, 7399), False, 'from graphscope import nx\n'), ((7520, 7580), 'graphscope.nx.builtin.eigenvector_centrality', 'nx.builtin.eigenvector_centrality', (['self.p2p'], {'weight': '"""weight"""'}), "(self.p2p, weight='weight')\n", (7553, 7580), False, 'from graphscope import nx\n'), ((7694, 7730), 'graphscope.nx.builtin.katz_centrality', 'nx.builtin.katz_centrality', (['self.p2p'], {}), '(self.p2p)\n', (7720, 7730), False, 'from graphscope import nx\n'), ((8216, 8252), 'graphscope.nx.builtin.hits', 'nx.builtin.hits', (['self.p2p'], {'tol': '(0.001)'}), '(self.p2p, tol=0.001)\n', (8231, 8252), False, 'from graphscope import nx\n'), ((8419, 8450), 'graphscope.nx.builtin.clustering', 'nx.builtin.clustering', (['self.p2p'], {}), '(self.p2p)\n', (8440, 8450), False, 'from graphscope import nx\n'), ((8560, 8601), 'graphscope.nx.builtin.triangles', 'nx.builtin.triangles', (['self.p2p_undirected'], {}), '(self.p2p_undirected)\n', (8580, 8601), False, 'from graphscope import nx\n'), ((8725, 8775), 'graphscope.nx.builtin.average_clustering', 'nx.builtin.average_clustering', (['self.p2p_undirected'], {}), '(self.p2p_undirected)\n', (8754, 8775), False, 'from graphscope import nx\n'), ((8839, 8898), 'graphscope.nx.builtin.weakly_connected_components', 'nx.builtin.weakly_connected_components', (['self.p2p_undirected'], {}), '(self.p2p_undirected)\n', (8877, 8898), False, 'from graphscope import nx\n'), ((8943, 8972), 'graphscope.nx.builtin.pagerank', 
'nx.builtin.pagerank', (['self.p2p'], {}), '(self.p2p)\n', (8962, 8972), False, 'from graphscope import nx\n'), ((9109, 9173), 'graphscope.nx.builtin.degree_assortativity_coefficient', 'nx.builtin.degree_assortativity_coefficient', (['self.p2p_undirected'], {}), '(self.p2p_undirected)\n', (9152, 9173), False, 'from graphscope import nx\n'), ((9189, 9228), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['ans', '(-0.0925578)'], {'places': '(5)'}), '(ans, -0.0925578, places=5)\n', (9201, 9228), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((11295, 11354), 'graphscope.nx.builtin.average_degree_connectivity', 'nx.builtin.average_degree_connectivity', (['self.p2p_undirected'], {}), '(self.p2p_undirected)\n', (11333, 11354), False, 'from graphscope import nx\n'), ((11991, 12035), 'graphscope.nx.builtin.voterank', 'nx.builtin.voterank', (['self.p2p_undirected', '(20)'], {}), '(self.p2p_undirected, 20)\n', (12010, 12035), False, 'from graphscope import nx\n'), ((12144, 12176), 'graphscope.nx.builtin.voterank', 'nx.builtin.voterank', (['self.p2p', '(8)'], {}), '(self.p2p, 8)\n', (12163, 12176), False, 'from graphscope import nx\n'), ((12336, 12390), 'graphscope.nx.builtin.all_simple_paths', 'nx.builtin.all_simple_paths', (['self.p2p', '(1)', '(4)'], {'cutoff': '(10)'}), '(self.p2p, 1, 4, cutoff=10)\n', (12363, 12390), False, 'from graphscope import nx\n'), ((12437, 12506), 'graphscope.nx.builtin.all_simple_paths', 'nx.builtin.all_simple_paths', (['self.p2p_undirected', '(1)', '[4, 6]'], {'cutoff': '(5)'}), '(self.p2p_undirected, 1, [4, 6], cutoff=5)\n', (12464, 12506), False, 'from graphscope import nx\n'), ((12591, 12606), 'graphscope.nx.null_graph', 'nx.null_graph', ([], {}), '()\n', (12604, 12606), False, 'from graphscope import nx\n'), ((12621, 12644), 'graphscope.nx.builtin.pagerank', 'nx.builtin.pagerank', (['eg'], {}), '(eg)\n', (12640, 12644), False, 'from graphscope import nx\n'), ((4845, 4871), 'graphscope.nx.tests.utils.almost_equal', 'almost_equal', (['r1[k]', 'r2[k]'], {}), '(r1[k], r2[k])\n', (4857, 4871), False, 'from graphscope.nx.tests.utils import almost_equal\n'), ((5216, 5237), 'graphscope.nx.utils.misc.replace_with_inf', 'replace_with_inf', (['ret'], {}), '(ret)\n', (5232, 5237), False, 'from graphscope.nx.utils.misc import replace_with_inf\n'), ((6715, 6749), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (6729, 6749), False, 'import os\n'), ((7014, 7066), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.p2p'], {'source': '(6)', 'target': '(3728)'}), '(self.p2p, source=6, target=3728)\n', (7033, 7066), False, 'from graphscope import nx\n'), ((11414, 11448), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (11428, 11448), False, 'import os\n')]
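The test class above drives the `graphscope.nx.builtin` algorithm entry points against the p2p fixtures. As a rough, hedged sketch of the same entry points on a throwaway graph (assuming an active graphscope session; `path_graph` is only a stand-in for the datasets the tests load):

import graphscope.nx as nx

G = nx.path_graph(5)                      # toy stand-in for the p2p fixtures
dc = nx.builtin.degree_centrality(G)      # same call as test_degree_centrality
pr = nx.builtin.pagerank(G)               # same call as test_pagerank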
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os

import numpy as np

import graphscope
from graphscope import sssp
from graphscope.dataset.ldbc import load_ldbc
from graphscope.framework.loader import Loader


def p2p_property_graph(num_workers, directed=True):
    data_dir = os.path.expandvars("${GS_TEST_DIR}/property")
    graphscope.set_option(show_log=True)
    graphscope.set_option(initializing_interactive_engine=False)
    sess = graphscope.session(num_workers=num_workers, run_on_local=True)
    graph = graphscope.Graph(sess, directed=directed)
    graph = graph.add_vertices("{}/p2p-31_property_v_0".format(data_dir), "person")
    graph = graph.add_edges("{}/p2p-31_property_e_0".format(data_dir), "knows")
    return sess, graph


def test_sssp():
    prev_result = None
    for num_workers in (1, 2, 3, 4):
        sess, g = p2p_property_graph(num_workers, True)
        sg = g.project_to_simple(0, 0, 0, 2)
        ctx = sssp(sg, 6)
        curr_result = (
            ctx.to_dataframe({"node": "v.id", "result": "r"})
            .sort_values(by=["node"])
            .to_numpy(dtype=int)
        )
        if prev_result is not None and not np.array_equal(prev_result, curr_result):
            raise RuntimeError(
                "Result is not consistent with different workers, current number of workers: %d",
                num_workers,
            )
        prev_result = curr_result
        sess.close()
[ "numpy.array_equal", "graphscope.set_option", "os.path.expandvars", "graphscope.session", "graphscope.Graph", "graphscope.sssp" ]
[((906, 951), 'os.path.expandvars', 'os.path.expandvars', (['"""${GS_TEST_DIR}/property"""'], {}), "('${GS_TEST_DIR}/property')\n", (924, 951), False, 'import os\n'), ((956, 992), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (977, 992), False, 'import graphscope\n'), ((997, 1057), 'graphscope.set_option', 'graphscope.set_option', ([], {'initializing_interactive_engine': '(False)'}), '(initializing_interactive_engine=False)\n', (1018, 1057), False, 'import graphscope\n'), ((1069, 1131), 'graphscope.session', 'graphscope.session', ([], {'num_workers': 'num_workers', 'run_on_local': '(True)'}), '(num_workers=num_workers, run_on_local=True)\n', (1087, 1131), False, 'import graphscope\n'), ((1144, 1185), 'graphscope.Graph', 'graphscope.Graph', (['sess'], {'directed': 'directed'}), '(sess, directed=directed)\n', (1160, 1185), False, 'import graphscope\n'), ((1568, 1579), 'graphscope.sssp', 'sssp', (['sg', '(6)'], {}), '(sg, 6)\n', (1572, 1579), False, 'from graphscope import sssp\n'), ((1790, 1830), 'numpy.array_equal', 'np.array_equal', (['prev_result', 'curr_result'], {}), '(prev_result, curr_result)\n', (1804, 1830), True, 'import numpy as np\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import networkx.classes.tests.test_function as func_tests import pytest from networkx.utils import edges_equal from networkx.utils import nodes_equal from graphscope import nx from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import with_graphscope_nx_context import_as_graphscope_nx( func_tests, decorators=pytest.mark.usefixtures("graphscope_session") ) @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(func_tests.TestFunction) class TestFunction: # subgraph in graphscope.nx is deep copy def test_subgraph(self): assert ( self.G.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.G, [0, 1, 2, 4]).adj ) assert ( self.DG.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.DG, [0, 1, 2, 4]).adj ) assert ( self.G.subgraph([0, 1, 2, 4]).adj == nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj ) assert ( self.DG.subgraph([0, 1, 2, 4]).adj == nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj ) H = nx.induced_subgraph(self.G.subgraph([0, 1, 2, 4]), [0, 1, 4]) assert H.adj == self.G.subgraph([0, 1, 4]).adj @pytest.mark.skip(reason="info api would be deprecated in networkx 3.0") def test_info(self): pass @pytest.mark.parametrize( "graph_type", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) ) @pytest.mark.usefixtures("graphscope_session") def test_set_node_attributes(graph_type): # Test single value G = nx.path_graph(3, create_using=graph_type) vals = 100 attr = "hello" nx.set_node_attributes(G, vals, attr) assert G.nodes[0][attr] == vals assert G.nodes[1][attr] == vals assert G.nodes[2][attr] == vals # Test dictionary G = nx.path_graph(3, create_using=graph_type) vals = dict(zip(sorted(G.nodes()), range(len(G)))) attr = "hi" nx.set_node_attributes(G, vals, attr) assert G.nodes[0][attr] == 0 assert G.nodes[1][attr] == 1 assert G.nodes[2][attr] == 2 # Test dictionary of dictionaries G = nx.path_graph(3, create_using=graph_type) d = {"hi": 0, "hello": 200} vals = dict.fromkeys(G.nodes(), d) vals.pop(0) nx.set_node_attributes(G, vals) assert G.nodes[0] == {} assert G.nodes[1]["hi"] == 0 assert G.nodes[2]["hello"] == 200 @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) @pytest.mark.usefixtures("graphscope_session") def test_set_edge_attributes(graph_type): # Test single value G = nx.path_graph(3, create_using=graph_type) attr = "hello" vals = 3 nx.set_edge_attributes(G, vals, attr) assert G[0][1][attr] == vals assert G[1][2][attr] == vals # Test multiple values G = nx.path_graph(3, create_using=graph_type) attr = "hi" edges = [(0, 1), (1, 2)] vals = dict(zip(edges, range(len(edges)))) nx.set_edge_attributes(G, vals, attr) assert G[0][1][attr] == 0 assert G[1][2][attr] == 1 # Test dictionary of dictionaries G = nx.path_graph(3, create_using=graph_type) d = {"hi": 0, "hello": 200} edges = [(0, 1)] vals = dict.fromkeys(edges, d) nx.set_edge_attributes(G, vals) assert G[0][1]["hi"] == 0 assert G[0][1]["hello"] == 200 assert G[1][2] == {} @pytest.mark.parametrize( "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] ) @pytest.mark.usefixtures("graphscope_session") def test_selfloops(graph_type): G = nx.complete_graph(3, 
create_using=graph_type) G.add_edge(0, 0) assert nodes_equal(nx.nodes_with_selfloops(G), [0]) assert edges_equal(nx.selfloop_edges(G), [(0, 0)]) assert edges_equal(nx.selfloop_edges(G, data=True), [(0, 0, {})]) assert nx.number_of_selfloops(G) == 1 @pytest.mark.parametrize( "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] ) @pytest.mark.usefixtures("graphscope_session") def test_selfloop_edges_attr(graph_type): G = nx.complete_graph(3, create_using=graph_type) G.add_edge(0, 0) G.add_edge(1, 1, weight=2) assert edges_equal( nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] ) assert edges_equal(nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]) @pytest.mark.parametrize("graph_type", [nx.Graph, nx.DiGraph]) @pytest.mark.usefixtures("graphscope_session") def test_selfloops_removal(graph_type): G = nx.complete_graph(3, create_using=graph_type) G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G, keys=True)) G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G, data=True)) G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G, keys=True, data=True)) @pytest.mark.skip(reason="graphscope not support restricted view") def test_restricted_view(G): pass @pytest.mark.skip(reason="graphscope not support restricted view") def test_restricted_view_multi(G): pass @pytest.mark.skip(reason="graphscope not support ispath") def test_ispath(G): pass @pytest.mark.skip(reason="graphscope not support pathweight") def test_pathweight(G): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "graphscope.nx.number_of_selfloops", "graphscope.nx.set_node_attributes", "pytest.mark.skip", "graphscope.nx.nodes_with_selfloops", "graphscope.nx.induced_subgraph", "pytest.mark.parametrize", "graphscope.nx.complete_graph", "pytest.mark.usefixtures", "graphscope.nx.subgraph", "graphscope.nx.set_edge_attributes", "graphscope.nx.path_graph", "graphscope.nx.selfloop_edges" ]
[((810, 855), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (833, 855), False, 'import pytest\n'), ((857, 908), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['func_tests.TestFunction'], {}), '(func_tests.TestFunction)\n', (883, 908), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1751, 1848), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_type"""', '(nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)'], {}), "('graph_type', (nx.Graph, nx.DiGraph, nx.MultiGraph,\n nx.MultiDiGraph))\n", (1774, 1848), False, 'import pytest\n'), ((1852, 1897), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1875, 1897), False, 'import pytest\n'), ((2797, 2858), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_type"""', '(nx.Graph, nx.DiGraph)'], {}), "('graph_type', (nx.Graph, nx.DiGraph))\n", (2820, 2858), False, 'import pytest\n'), ((2860, 2905), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2883, 2905), False, 'import pytest\n'), ((3740, 3837), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_type"""', '[nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]'], {}), "('graph_type', [nx.Graph, nx.DiGraph, nx.MultiGraph,\n nx.MultiDiGraph])\n", (3763, 3837), False, 'import pytest\n'), ((3841, 3886), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (3864, 3886), False, 'import pytest\n'), ((4220, 4317), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_type"""', '[nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]'], {}), "('graph_type', [nx.Graph, nx.DiGraph, nx.MultiGraph,\n nx.MultiDiGraph])\n", (4243, 4317), False, 'import pytest\n'), ((4321, 4366), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (4344, 4366), False, 'import pytest\n'), ((4712, 4773), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_type"""', '[nx.Graph, nx.DiGraph]'], {}), "('graph_type', [nx.Graph, nx.DiGraph])\n", (4735, 4773), False, 'import pytest\n'), ((4775, 4820), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (4798, 4820), False, 'import pytest\n'), ((5163, 5228), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope not support restricted view"""'}), "(reason='graphscope not support restricted view')\n", (5179, 5228), False, 'import pytest\n'), ((5270, 5335), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope not support restricted view"""'}), "(reason='graphscope not support restricted view')\n", (5286, 5335), False, 'import pytest\n'), ((5383, 5439), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope not support ispath"""'}), "(reason='graphscope not support ispath')\n", (5399, 5439), False, 'import pytest\n'), ((5472, 5532), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope not support pathweight"""'}), "(reason='graphscope not support pathweight')\n", (5488, 5532), False, 'import pytest\n'), ((1638, 1709), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""info api would be deprecated in networkx 3.0"""'}), "(reason='info api would be deprecated in networkx 
3.0')\n", (1654, 1709), False, 'import pytest\n'), ((1972, 2013), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (1985, 2013), False, 'from graphscope import nx\n'), ((2052, 2089), 'graphscope.nx.set_node_attributes', 'nx.set_node_attributes', (['G', 'vals', 'attr'], {}), '(G, vals, attr)\n', (2074, 2089), False, 'from graphscope import nx\n'), ((2229, 2270), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (2242, 2270), False, 'from graphscope import nx\n'), ((2346, 2383), 'graphscope.nx.set_node_attributes', 'nx.set_node_attributes', (['G', 'vals', 'attr'], {}), '(G, vals, attr)\n', (2368, 2383), False, 'from graphscope import nx\n'), ((2530, 2571), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (2543, 2571), False, 'from graphscope import nx\n'), ((2663, 2694), 'graphscope.nx.set_node_attributes', 'nx.set_node_attributes', (['G', 'vals'], {}), '(G, vals)\n', (2685, 2694), False, 'from graphscope import nx\n'), ((2980, 3021), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (2993, 3021), False, 'from graphscope import nx\n'), ((3058, 3095), 'graphscope.nx.set_edge_attributes', 'nx.set_edge_attributes', (['G', 'vals', 'attr'], {}), '(G, vals, attr)\n', (3080, 3095), False, 'from graphscope import nx\n'), ((3198, 3239), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (3211, 3239), False, 'from graphscope import nx\n'), ((3336, 3373), 'graphscope.nx.set_edge_attributes', 'nx.set_edge_attributes', (['G', 'vals', 'attr'], {}), '(G, vals, attr)\n', (3358, 3373), False, 'from graphscope import nx\n'), ((3481, 3522), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (3494, 3522), False, 'from graphscope import nx\n'), ((3615, 3646), 'graphscope.nx.set_edge_attributes', 'nx.set_edge_attributes', (['G', 'vals'], {}), '(G, vals)\n', (3637, 3646), False, 'from graphscope import nx\n'), ((3927, 3972), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (3944, 3972), False, 'from graphscope import nx\n'), ((4417, 4462), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (4434, 4462), False, 'from graphscope import nx\n'), ((4869, 4914), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(3)'], {'create_using': 'graph_type'}), '(3, create_using=graph_type)\n', (4886, 4914), False, 'from graphscope import nx\n'), ((759, 804), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (782, 804), False, 'import pytest\n'), ((4017, 4043), 'graphscope.nx.nodes_with_selfloops', 'nx.nodes_with_selfloops', (['G'], {}), '(G)\n', (4040, 4043), False, 'from graphscope import nx\n'), ((4073, 4093), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {}), '(G)\n', (4090, 4093), False, 'from graphscope import nx\n'), ((4128, 4159), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (4145, 4159), False, 'from graphscope import nx\n'), ((4186, 4211), 'graphscope.nx.number_of_selfloops', 
'nx.number_of_selfloops', (['G'], {}), '(G)\n', (4208, 4211), False, 'from graphscope import nx\n'), ((4547, 4578), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (4564, 4578), False, 'from graphscope import nx\n'), ((4645, 4680), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '"""weight"""'}), "(G, data='weight')\n", (4662, 4680), False, 'from graphscope import nx\n'), ((4960, 4991), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'keys': '(True)'}), '(G, keys=True)\n', (4977, 4991), False, 'from graphscope import nx\n'), ((5038, 5069), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (5055, 5069), False, 'from graphscope import nx\n'), ((5116, 5158), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'keys': '(True)', 'data': '(True)'}), '(G, keys=True, data=True)\n', (5133, 5158), False, 'from graphscope import nx\n'), ((1069, 1102), 'graphscope.nx.subgraph', 'nx.subgraph', (['self.G', '[0, 1, 2, 4]'], {}), '(self.G, [0, 1, 2, 4])\n', (1080, 1102), False, 'from graphscope import nx\n'), ((1184, 1218), 'graphscope.nx.subgraph', 'nx.subgraph', (['self.DG', '[0, 1, 2, 4]'], {}), '(self.DG, [0, 1, 2, 4])\n', (1195, 1218), False, 'from graphscope import nx\n'), ((1311, 1352), 'graphscope.nx.induced_subgraph', 'nx.induced_subgraph', (['self.G', '[0, 1, 2, 4]'], {}), '(self.G, [0, 1, 2, 4])\n', (1330, 1352), False, 'from graphscope import nx\n'), ((1446, 1488), 'graphscope.nx.induced_subgraph', 'nx.induced_subgraph', (['self.DG', '[0, 1, 2, 4]'], {}), '(self.DG, [0, 1, 2, 4])\n', (1465, 1488), False, 'from graphscope import nx\n')]
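The attribute and self-loop helpers exercised above condense into a small, hedged sketch (assuming an active graphscope session; the builders and setters are exactly the ones the tests call):

from graphscope import nx

G = nx.path_graph(3)
nx.set_node_attributes(G, 100, "hello")                  # one value fanned out to every node
nx.set_edge_attributes(G, {(0, 1): 0, (1, 2): 1}, "hi")  # per-edge values keyed by edge tuple
G.add_edge(0, 0)                                         # a self loop
assert list(nx.selfloop_edges(G)) == [(0, 0)]
assert nx.number_of_selfloops(G) == 1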
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file convert.py is referred and derived from project NetworkX, # # https://github.com/networkx/networkx/blob/master/networkx/convert.py # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import warnings from collections.abc import Collection from collections.abc import Generator from collections.abc import Iterator import networkx.convert import graphscope from graphscope import nx from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import patch_docstring import_as_graphscope_nx(networkx.convert) @patch_docstring(networkx.convert.to_networkx_graph) def to_networkx_graph(data, create_using=None, multigraph_input=False): # noqa: C901 # graphscope graph if isinstance(data, graphscope.Graph): if create_using is None: raise nx.NetworkXError( "Use None to convert graphscope graph to networkx graph." ) # check session and direction compatible if data.session_id != create_using.session_id: raise nx.NetworkXError( "The source graph is not loaded in session {}." % create_using.session_id ) if data.is_directed() != create_using.is_directed(): if data.is_directed(): msg = "The source graph is a directed graph, can't be used to init nx.Graph. You may use nx.DiGraph" else: msg = "The source graph is a undirected graph, can't be used to init nx.DiGraph. You may use nx.Graph" raise nx.NetworkXError(msg) create_using._key = data.key create_using._schema = data.schema create_using._op = data.op if create_using._default_label is not None: try: create_using._default_label_id = ( create_using._schema.get_vertex_label_id( create_using._default_label ) ) except KeyError: raise nx.NetworkXError( "default label {} not existed in graph." % create_using._default_label ) create_using._graph_type = data.graph_type return # networkx graph or graphscope.nx graph if hasattr(data, "adj"): try: result = nx.from_dict_of_dicts( data.adj, create_using=create_using, multigraph_input=data.is_multigraph(), ) if hasattr(data, "graph"): # data.graph should be dict-like result.graph.update(data.graph) if hasattr(data, "nodes"): # data.nodes should be dict-like result.add_nodes_from(data.nodes.items()) return result except Exception as err: raise nx.NetworkXError( "Input is not a correct NetworkX-like graph." ) from err # dict of dicts/lists if isinstance(data, dict): try: return nx.from_dict_of_dicts( data, create_using=create_using, multigraph_input=multigraph_input ) except Exception as err: if multigraph_input is True: raise nx.NetworkXError( f"converting multigraph_input raised:\n{type(err)}: {err}" ) try: return nx.from_dict_of_lists(data, create_using=create_using) except Exception as err: raise TypeError("Input is not known type.") from err # Pandas DataFrame try: import pandas as pd if isinstance(data, pd.DataFrame): if data.shape[0] == data.shape[1]: try: return nx.from_pandas_adjacency(data, create_using=create_using) except Exception as err: msg = "Input is not a correct Pandas DataFrame adjacency matrix." raise nx.NetworkXError(msg) from err else: try: return nx.from_pandas_edgelist( data, edge_attr=True, create_using=create_using ) except Exception as err: msg = "Input is not a correct Pandas DataFrame edge-list." 
raise nx.NetworkXError(msg) from err except ImportError: msg = "pandas not found, skipping conversion test." warnings.warn(msg, ImportWarning) # numpy matrix or ndarray try: import numpy if isinstance(data, (numpy.matrix, numpy.ndarray)): try: return nx.from_numpy_matrix(data, create_using=create_using) except Exception as err: raise nx.NetworkXError( "Input is not a correct numpy matrix or array." ) from err except ImportError: warnings.warn("numpy not found, skipping conversion test.", ImportWarning) # scipy sparse matrix - any format try: import scipy if hasattr(data, "format"): try: return nx.from_scipy_sparse_matrix(data, create_using=create_using) except Exception as err: raise nx.NetworkXError( "Input is not a correct scipy sparse matrix type." ) from err except ImportError: warnings.warn("scipy not found, skipping conversion test.", ImportWarning) # Note: most general check - should remain last in order of execution # Includes containers (e.g. list, set, dict, etc.), generators, and # iterators (e.g. itertools.chain) of edges if isinstance(data, (Collection, Generator, Iterator)): try: return nx.from_edgelist(data, create_using=create_using) except Exception as err: raise nx.NetworkXError("Input is not a valid edge list") from err raise nx.NetworkXError("Input is not a known data type for conversion.") def to_nx_graph(nx_graph): import networkx if not nx_graph.is_directed() and not nx_graph.is_multigraph(): g = networkx.Graph() edges = nx_graph.edges.data() elif nx_graph.is_directed() and not nx_graph.is_multigraph(): g = networkx.DiGraph() edges = nx_graph.edges.data() elif not nx_graph.is_directed() and nx_graph.is_multigraph(): g = networkx.MultiGraph() edges = nx_graph.edges.data(keys=True) else: g = networkx.MultiDiGraph() edges = nx_graph.edges.data(keys=True) nodes = nx_graph.nodes.data() g.update(edges, nodes) g.graph.update(nx_graph.graph) return g
[ "graphscope.nx.from_dict_of_dicts", "graphscope.nx.from_pandas_edgelist", "graphscope.nx.from_scipy_sparse_matrix", "graphscope.nx.from_numpy_matrix", "networkx.MultiGraph", "graphscope.nx.utils.compat.patch_docstring", "warnings.warn", "networkx.MultiDiGraph", "graphscope.nx.from_pandas_adjacency", "networkx.Graph", "graphscope.nx.from_edgelist", "graphscope.nx.NetworkXError", "networkx.DiGraph", "graphscope.nx.utils.compat.import_as_graphscope_nx", "graphscope.nx.from_dict_of_lists" ]
[((805, 846), 'graphscope.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.convert'], {}), '(networkx.convert)\n', (828, 846), False, 'from graphscope.nx.utils.compat import import_as_graphscope_nx\n'), ((850, 901), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['networkx.convert.to_networkx_graph'], {}), '(networkx.convert.to_networkx_graph)\n', (865, 901), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((6211, 6277), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Input is not a known data type for conversion."""'], {}), "('Input is not a known data type for conversion.')\n", (6227, 6277), False, 'from graphscope import nx\n'), ((6408, 6424), 'networkx.Graph', 'networkx.Graph', ([], {}), '()\n', (6422, 6424), False, 'import networkx\n'), ((1105, 1180), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Use None to convert graphscope graph to networkx graph."""'], {}), "('Use None to convert graphscope graph to networkx graph.')\n", (1121, 1180), False, 'from graphscope import nx\n'), ((1333, 1428), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (["('The source graph is not loaded in session {}.' % create_using.session_id)"], {}), "('The source graph is not loaded in session {}.' %\n create_using.session_id)\n", (1349, 1428), False, 'from graphscope import nx\n'), ((1839, 1860), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['msg'], {}), '(msg)\n', (1855, 1860), False, 'from graphscope import nx\n'), ((3305, 3399), 'graphscope.nx.from_dict_of_dicts', 'nx.from_dict_of_dicts', (['data'], {'create_using': 'create_using', 'multigraph_input': 'multigraph_input'}), '(data, create_using=create_using, multigraph_input=\n multigraph_input)\n', (3326, 3399), False, 'from graphscope import nx\n'), ((4733, 4766), 'warnings.warn', 'warnings.warn', (['msg', 'ImportWarning'], {}), '(msg, ImportWarning)\n', (4746, 4766), False, 'import warnings\n'), ((5187, 5261), 'warnings.warn', 'warnings.warn', (['"""numpy not found, skipping conversion test."""', 'ImportWarning'], {}), "('numpy not found, skipping conversion test.', ImportWarning)\n", (5200, 5261), False, 'import warnings\n'), ((5677, 5751), 'warnings.warn', 'warnings.warn', (['"""scipy not found, skipping conversion test."""', 'ImportWarning'], {}), "('scipy not found, skipping conversion test.', ImportWarning)\n", (5690, 5751), False, 'import warnings\n'), ((6039, 6088), 'graphscope.nx.from_edgelist', 'nx.from_edgelist', (['data'], {'create_using': 'create_using'}), '(data, create_using=create_using)\n', (6055, 6088), False, 'from graphscope import nx\n'), ((6541, 6559), 'networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (6557, 6559), False, 'import networkx\n'), ((3112, 3175), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Input is not a correct NetworkX-like graph."""'], {}), "('Input is not a correct NetworkX-like graph.')\n", (3128, 3175), False, 'from graphscope import nx\n'), ((4929, 4982), 'graphscope.nx.from_numpy_matrix', 'nx.from_numpy_matrix', (['data'], {'create_using': 'create_using'}), '(data, create_using=create_using)\n', (4949, 4982), False, 'from graphscope import nx\n'), ((5409, 5469), 'graphscope.nx.from_scipy_sparse_matrix', 'nx.from_scipy_sparse_matrix', (['data'], {'create_using': 'create_using'}), '(data, create_using=create_using)\n', (5436, 5469), False, 'from graphscope import nx\n'), ((6140, 6190), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Input is not a valid edge list"""'], {}), "('Input is 
not a valid edge list')\n", (6156, 6190), False, 'from graphscope import nx\n'), ((6676, 6697), 'networkx.MultiGraph', 'networkx.MultiGraph', ([], {}), '()\n', (6695, 6697), False, 'import networkx\n'), ((6767, 6790), 'networkx.MultiDiGraph', 'networkx.MultiDiGraph', ([], {}), '()\n', (6788, 6790), False, 'import networkx\n'), ((2301, 2394), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (["('default label {} not existed in graph.' % create_using._default_label)"], {}), "('default label {} not existed in graph.' % create_using.\n _default_label)\n", (2317, 2394), False, 'from graphscope import nx\n'), ((3676, 3730), 'graphscope.nx.from_dict_of_lists', 'nx.from_dict_of_lists', (['data'], {'create_using': 'create_using'}), '(data, create_using=create_using)\n', (3697, 3730), False, 'from graphscope import nx\n'), ((4037, 4094), 'graphscope.nx.from_pandas_adjacency', 'nx.from_pandas_adjacency', (['data'], {'create_using': 'create_using'}), '(data, create_using=create_using)\n', (4061, 4094), False, 'from graphscope import nx\n'), ((4345, 4417), 'graphscope.nx.from_pandas_edgelist', 'nx.from_pandas_edgelist', (['data'], {'edge_attr': '(True)', 'create_using': 'create_using'}), '(data, edge_attr=True, create_using=create_using)\n', (4368, 4417), False, 'from graphscope import nx\n'), ((5042, 5107), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Input is not a correct numpy matrix or array."""'], {}), "('Input is not a correct numpy matrix or array.')\n", (5058, 5107), False, 'from graphscope import nx\n'), ((5529, 5597), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['"""Input is not a correct scipy sparse matrix type."""'], {}), "('Input is not a correct scipy sparse matrix type.')\n", (5545, 5597), False, 'from graphscope import nx\n'), ((4248, 4269), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['msg'], {}), '(msg)\n', (4264, 4269), False, 'from graphscope import nx\n'), ((4610, 4631), 'graphscope.nx.NetworkXError', 'nx.NetworkXError', (['msg'], {}), '(msg)\n', (4626, 4631), False, 'from graphscope import nx\n')]
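For the conversion module above, the reverse helper `to_nx_graph` is self-contained, so a minimal usage sketch looks roughly like the following (the import path is an assumption based on the module's location; only the behaviour visible in the function body above is relied on):

import networkx

from graphscope import nx
from graphscope.nx.convert import to_nx_graph   # assumed import path for the module above

G = nx.Graph()
G.add_edge(1, 2, weight=3)
plain = to_nx_graph(G)                          # copies nodes, edges and graph attrs into a networkx.Graph
assert isinstance(plain, networkx.Graph)
assert plain[1][2]["weight"] == 3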
"""Unit tests for the :mod:`graphscope.nx.generators.expanders` module. """ # fmt: off import pytest from networkx import adjacency_matrix from networkx.generators.tests.test_expanders import test_chordal_cycle_graph from networkx.generators.tests.test_expanders import test_margulis_gabber_galil_graph from networkx.generators.tests.test_expanders import \ test_margulis_gabber_galil_graph_badinput #fmt: off try: from networkx.generators.tests.test_expanders import test_paley_graph except ImportError: # NetworkX<=2.4 not contains paley_graph test_paley_graph = lambda: None import graphscope.nx as nx from graphscope.nx import number_of_nodes from graphscope.nx.generators.expanders import chordal_cycle_graph from graphscope.nx.generators.expanders import margulis_gabber_galil_graph from graphscope.nx.utils.compat import with_graphscope_nx_context try: from graphscope.nx.generators.expanders import paley_graph except ImportError: # NetworkX <= 2.4 pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_margulis_gabber_galil_graph) def test_margulis_gabber_galil_graph(): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_chordal_cycle_graph) def test_chordal_cycle_graph(): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_margulis_gabber_galil_graph_badinput) def test_margulis_gabber_galil_graph_badinput(): pass @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(test_paley_graph) def test_paley_graph(): pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((1000, 1045), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1023, 1045), False, 'import pytest\n'), ((1047, 1107), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_margulis_gabber_galil_graph'], {}), '(test_margulis_gabber_galil_graph)\n', (1073, 1107), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1160, 1205), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1183, 1205), False, 'import pytest\n'), ((1207, 1259), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_chordal_cycle_graph'], {}), '(test_chordal_cycle_graph)\n', (1233, 1259), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1304, 1349), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1327, 1349), False, 'import pytest\n'), ((1351, 1420), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_margulis_gabber_galil_graph_badinput'], {}), '(test_margulis_gabber_galil_graph_badinput)\n', (1377, 1420), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1482, 1527), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1505, 1527), False, 'import pytest\n'), ((1529, 1573), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['test_paley_graph'], {}), '(test_paley_graph)\n', (1555, 1573), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import datetime import logging import random from concurrent.futures import ThreadPoolExecutor from copy import deepcopy from enum import Enum from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection from gremlin_python.process.anonymous_traversal import traversal from graphscope.config import GSConfig as gs_config from graphscope.framework.dag import DAGNode from graphscope.framework.dag_utils import close_interactive_query from graphscope.framework.dag_utils import create_interactive_query from graphscope.framework.dag_utils import fetch_gremlin_result from graphscope.framework.dag_utils import gremlin_query from graphscope.framework.dag_utils import gremlin_to_subgraph from graphscope.framework.loader import Loader logger = logging.getLogger("graphscope") class InteractiveQueryStatus(Enum): """A enumeration class of current status of InteractiveQuery""" Initializing = 0 Running = 1 Failed = 2 Closed = 3 class ResultSetDAGNode(DAGNode): """A class represents a result set node in a DAG. This is a wrapper for :class:`gremlin_python.driver.resultset.ResultSet`, and you can get the result by :method:`one()` or :method:`all()`. """ def __init__(self, dag_node, op): self._session = dag_node.session self._op = op # add op to dag self._session.dag.add_op(self._op) def one(self): """See details in :method:`gremlin_python.driver.resultset.ResultSet.one`""" # avoid circular import from graphscope.framework.context import ResultDAGNode op = fetch_gremlin_result(self, "one") return ResultDAGNode(self, op) def all(self): """See details in :method:`gremlin_python.driver.resultset.ResultSet.all` Note that this method is equal to `ResultSet.all().result()` """ # avoid circular import from graphscope.framework.context import ResultDAGNode op = fetch_gremlin_result(self, "all") return ResultDAGNode(self, op) class ResultSet(object): def __init__(self, result_set_node): self._result_set_node = result_set_node self._session = self._result_set_node.session # copy and set op evaluated self._result_set_node.op = deepcopy(self._result_set_node.op) self._result_set_node.evaluated = True self._session.dag.add_op(self._result_set_node.op) def one(self): return self._session._wrapper(self._result_set_node.one()) def all(self): return self._session._wrapper(self._result_set_node.all()) class InteractiveQueryDAGNode(DAGNode): """A class represents an interactive query node in a DAG. The following example demonstrates its usage: .. 
code:: python >>> # lazy node >>> import graphscope as gs >>> sess = gs.session(mode="lazy") >>> g = sess.g() # <graphscope.framework.graph.GraphDAGNode object> >>> ineractive = sess.gremlin(g) >>> print(ineractive) # <graphscope.interactive.query.InteractiveQueryDAGNode object> >>> rs = ineractive.execute("g.V()") >>> print(rs) # <graphscope.ineractive.query.ResultSetDAGNode object> >>> r = rs.one() >>> print(r) # <graphscope.framework.context.ResultDAGNode> >>> print(sess.run(r)) [2] >>> subgraph = ineractive.subgraph("xxx") >>> print(subgraph) # <graphscope.framework.graph.GraphDAGNode object> >>> g2 = sess.run(subgraph) >>> print(g2) # <graphscope.framework.graph.Graph object> """ def __init__(self, session, graph, engine_params=None): """ Args: session (:class:`Session`): instance of GraphScope session. graph (:class:`graphscope.framework.graph.GraphDAGNode`): A graph instance that the gremlin query on. engine_params (dict, optional): Configuration to startup the interactive engine. See detail in: `interactive_engine/deploy/docker/dockerfile/executor.vineyard.properties` """ self._session = session self._graph = graph self._engine_params = engine_params self._op = create_interactive_query( self._graph, self._engine_params, gs_config.k8s_gie_gremlin_server_cpu, gs_config.k8s_gie_gremlin_server_mem, ) # add op to dag self._session.dag.add_op(self._op) # TODO(yuansi): Support gaia def gaia(self): return self def execute(self, query, request_options=None): """Execute gremlin querying scripts. Args: query (str): Scripts that written in gremlin quering language. request_options (dict, optional): Gremlin request options. format: { "engine": "gae" } Returns: :class:`graphscope.framework.context.ResultDAGNode`: A result holds the gremlin result, evaluated in eager mode. """ op = gremlin_query(self, query, request_options) return ResultSetDAGNode(self, op) def subgraph(self, gremlin_script, request_options=None): """Create a subgraph, which input is the result of the execution of `gremlin_script`. Any gremlin script that output a set of edges can be used to contruct a subgraph. Args: gremlin_script (str): Gremlin script to be executed. request_options (dict, optional): Gremlin request options. format: { "engine": "gae" } Returns: :class:`graphscope.framework.graph.GraphDAGNode`: A new graph constructed by the gremlin output, that also stored in vineyard. """ # avoid circular import from graphscope.framework.graph import GraphDAGNode op = gremlin_to_subgraph( self, gremlin_script=gremlin_script, request_options=request_options, oid_type=self._graph._oid_type, ) return GraphDAGNode(self._session, op) def close(self): """Close interactive engine and release the resources. Returns: :class:`graphscope.interactive.query.ClosedInteractiveQuery` Evaluated in eager mode. """ op = close_interactive_query(self) return ClosedInteractiveQuery(self._session, op) class InteractiveQuery(object): """`InteractiveQuery` class, is a simple wrapper around `Gremlin-Python <https://pypi.org/project/gremlinpython/>`_, which implements Gremlin within the Python language. It also can expose gremlin endpoint which can be used by any other standard gremlin console, with the method `graph_url()`. It also has a method called `subgraph` which can extract some fragments from origin graph, produce a new, smaller but concise graph stored in vineyard, which lifetime is independent from the origin graph. 
User can either use `execute()` to submit a script, or use `traversal_source()` to get a `GraphTraversalSource` for further traversal. """ def __init__( self, interactive_query_node=None, frontend_endpoint=None, object_id=None ): """Construct a :class:`InteractiveQuery` object.""" self._status = InteractiveQueryStatus.Initializing self._graph_url = None # graph object id stored in vineyard self._object_id = object_id # interactive_query_node is None used for create a interative query # implicitly in eager mode if interactive_query_node is not None: self._interactive_query_node = interactive_query_node self._session = self._interactive_query_node.session # copy and set op evaluated self._interactive_query_node.op = deepcopy(self._interactive_query_node.op) self._interactive_query_node.evaluated = True self._session.dag.add_op(self._interactive_query_node.op) if frontend_endpoint is not None: self._graph_url = "ws://{0}/gremlin".format(frontend_endpoint) @property def graph_url(self): """The gremlin graph url can be used with any standard gremlin console, e.g., tinkerpop.""" return self._graph_url @property def status(self): return self._status @property def object_id(self): return self._object_id @status.setter def status(self, value): self._status = value @property def error_msg(self): return self._error_msg @error_msg.setter def error_msg(self, error_msg): self._error_msg = error_msg def closed(self): """Return if the current instance is closed.""" return self._status == InteractiveQueryStatus.Closed def gaia(self): if self._status != InteractiveQueryStatus.Running: raise RuntimeError( "Interactive query is unavailable with %s status.", str(self._status) ) return self._session._wrapper(self._interactive_query_node.gaia()) def subgraph(self, gremlin_script, request_options=None): if self._status != InteractiveQueryStatus.Running: raise RuntimeError( "Interactive query is unavailable with %s status.", str(self._status) ) return self._session._wrapper( self._interactive_query_node.subgraph(gremlin_script, request_options) ) def execute(self, query, request_options=None): if self._status != InteractiveQueryStatus.Running: raise RuntimeError( "Interactive query is unavailable with %s status.", str(self._status) ) return self._session._wrapper( self._interactive_query_node.execute(query, request_options) ) def traversal_source(self): """Create a GraphTraversalSource and return. Once `g` has been created using a connection, we can start to write Gremlin traversals to query the remote graph. Raises: RuntimeError: If the interactive script is not running. Examples: .. 
code:: python sess = graphscope.session() graph = load_modern_graph(sess, modern_graph_data_dir) interactive = sess.gremlin(graph) g = interactive.traversal_source() print(g.V().both()[1:3].toList()) print(g.V().both().name.toList()) Returns: `GraphTraversalSource` """ if self._status != InteractiveQueryStatus.Running: raise RuntimeError( "Interactive query is unavailable with %s status.", str(self._status) ) return traversal().withRemote(DriverRemoteConnection(self._graph_url, "g")) def close(self): """Close interactive instance and release resources""" if not self.closed() and not self._session.closed: self._session._wrapper(self._interactive_query_node.close()) self._session._close_interactive_instance(self) self._status = InteractiveQueryStatus.Closed class ClosedInteractiveQuery(DAGNode): """Closed interactive query node in a DAG.""" def __init__(self, session, op): self._session = session self._op = op # add op to dag self._session.dag.add_op(self._op)
[ "graphscope.framework.graph.GraphDAGNode", "copy.deepcopy", "logging.getLogger", "gremlin_python.driver.driver_remote_connection.DriverRemoteConnection", "graphscope.framework.dag_utils.fetch_gremlin_result", "graphscope.framework.dag_utils.gremlin_query", "graphscope.framework.dag_utils.close_interactive_query", "gremlin_python.process.anonymous_traversal.traversal", "graphscope.framework.dag_utils.create_interactive_query", "graphscope.framework.context.ResultDAGNode", "graphscope.framework.dag_utils.gremlin_to_subgraph" ]
[((1432, 1463), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1449, 1463), False, 'import logging\n'), ((2267, 2300), 'graphscope.framework.dag_utils.fetch_gremlin_result', 'fetch_gremlin_result', (['self', '"""one"""'], {}), "(self, 'one')\n", (2287, 2300), False, 'from graphscope.framework.dag_utils import fetch_gremlin_result\n'), ((2316, 2339), 'graphscope.framework.context.ResultDAGNode', 'ResultDAGNode', (['self', 'op'], {}), '(self, op)\n', (2329, 2339), False, 'from graphscope.framework.context import ResultDAGNode\n'), ((2633, 2666), 'graphscope.framework.dag_utils.fetch_gremlin_result', 'fetch_gremlin_result', (['self', '"""all"""'], {}), "(self, 'all')\n", (2653, 2666), False, 'from graphscope.framework.dag_utils import fetch_gremlin_result\n'), ((2682, 2705), 'graphscope.framework.context.ResultDAGNode', 'ResultDAGNode', (['self', 'op'], {}), '(self, op)\n', (2695, 2705), False, 'from graphscope.framework.context import ResultDAGNode\n'), ((2947, 2981), 'copy.deepcopy', 'deepcopy', (['self._result_set_node.op'], {}), '(self._result_set_node.op)\n', (2955, 2981), False, 'from copy import deepcopy\n'), ((4891, 5030), 'graphscope.framework.dag_utils.create_interactive_query', 'create_interactive_query', (['self._graph', 'self._engine_params', 'gs_config.k8s_gie_gremlin_server_cpu', 'gs_config.k8s_gie_gremlin_server_mem'], {}), '(self._graph, self._engine_params, gs_config.\n k8s_gie_gremlin_server_cpu, gs_config.k8s_gie_gremlin_server_mem)\n', (4915, 5030), False, 'from graphscope.framework.dag_utils import create_interactive_query\n'), ((5737, 5780), 'graphscope.framework.dag_utils.gremlin_query', 'gremlin_query', (['self', 'query', 'request_options'], {}), '(self, query, request_options)\n', (5750, 5780), False, 'from graphscope.framework.dag_utils import gremlin_query\n'), ((6581, 6707), 'graphscope.framework.dag_utils.gremlin_to_subgraph', 'gremlin_to_subgraph', (['self'], {'gremlin_script': 'gremlin_script', 'request_options': 'request_options', 'oid_type': 'self._graph._oid_type'}), '(self, gremlin_script=gremlin_script, request_options=\n request_options, oid_type=self._graph._oid_type)\n', (6600, 6707), False, 'from graphscope.framework.dag_utils import gremlin_to_subgraph\n'), ((6777, 6808), 'graphscope.framework.graph.GraphDAGNode', 'GraphDAGNode', (['self._session', 'op'], {}), '(self._session, op)\n', (6789, 6808), False, 'from graphscope.framework.graph import GraphDAGNode\n'), ((7051, 7080), 'graphscope.framework.dag_utils.close_interactive_query', 'close_interactive_query', (['self'], {}), '(self)\n', (7074, 7080), False, 'from graphscope.framework.dag_utils import close_interactive_query\n'), ((8571, 8612), 'copy.deepcopy', 'deepcopy', (['self._interactive_query_node.op'], {}), '(self._interactive_query_node.op)\n', (8579, 8612), False, 'from copy import deepcopy\n'), ((11549, 11593), 'gremlin_python.driver.driver_remote_connection.DriverRemoteConnection', 'DriverRemoteConnection', (['self._graph_url', '"""g"""'], {}), "(self._graph_url, 'g')\n", (11571, 11593), False, 'from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection\n'), ((11526, 11537), 'gremlin_python.process.anonymous_traversal.traversal', 'traversal', ([], {}), '()\n', (11535, 11537), False, 'from gremlin_python.process.anonymous_traversal import traversal\n')]
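Stitching together the usage fragments from the `InteractiveQueryDAGNode` and `InteractiveQuery` docstrings above, an eager-mode round trip looks roughly like this (a sketch only: `load_modern_graph` and `modern_graph_data_dir` come from the docstring and are not defined here):

import graphscope

sess = graphscope.session()
graph = load_modern_graph(sess, modern_graph_data_dir)   # loader referenced in the docstring
interactive = sess.gremlin(graph)
rs = interactive.execute("g.V()")                        # wrapped gremlin ResultSet
print(rs.one())
g = interactive.traversal_source()                       # gremlin-python GraphTraversalSource
print(g.V().both().name.toList())
interactive.close()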
"""Unit tests for the :mod:`networkx.generators.random_graphs` module. """ import pytest from networkx.exception import NetworkXError from networkx.generators.tests.test_random_graphs import TestGeneratorsRandom from graphscope.nx.generators.random_graphs import barabasi_albert_graph from graphscope.nx.generators.random_graphs import binomial_graph from graphscope.nx.generators.random_graphs import connected_watts_strogatz_graph from graphscope.nx.generators.random_graphs import dense_gnm_random_graph from graphscope.nx.generators.random_graphs import dual_barabasi_albert_graph from graphscope.nx.generators.random_graphs import erdos_renyi_graph from graphscope.nx.generators.random_graphs import extended_barabasi_albert_graph from graphscope.nx.generators.random_graphs import fast_gnp_random_graph from graphscope.nx.generators.random_graphs import gnm_random_graph from graphscope.nx.generators.random_graphs import gnp_random_graph from graphscope.nx.generators.random_graphs import newman_watts_strogatz_graph from graphscope.nx.generators.random_graphs import powerlaw_cluster_graph from graphscope.nx.generators.random_graphs import random_kernel_graph from graphscope.nx.generators.random_graphs import random_lobster from graphscope.nx.generators.random_graphs import random_powerlaw_tree from graphscope.nx.generators.random_graphs import random_powerlaw_tree_sequence from graphscope.nx.generators.random_graphs import random_regular_graph from graphscope.nx.generators.random_graphs import random_shell_graph from graphscope.nx.generators.random_graphs import watts_strogatz_graph from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGeneratorsRandom) class TestGeneratorsRandom: def test_random_graph(self): seed = 42 G = gnp_random_graph(100, 0.25, seed) G = gnp_random_graph(100, 0.25, seed, directed=True) G = binomial_graph(100, 0.25, seed) G = erdos_renyi_graph(100, 0.25, seed) G = fast_gnp_random_graph(100, 0.25, seed) G = fast_gnp_random_graph(100, 0.25, seed, directed=True) G = gnm_random_graph(100, 20, seed) G = gnm_random_graph(100, 20, seed, directed=True) G = dense_gnm_random_graph(100, 20, seed) G = watts_strogatz_graph(10, 2, 0.25, seed) assert len(G) == 10 assert G.number_of_edges() == 10 G = connected_watts_strogatz_graph(10, 2, 0.1, tries=10, seed=seed) assert len(G) == 10 assert G.number_of_edges() == 10 pytest.raises( NetworkXError, connected_watts_strogatz_graph, 10, 2, 0.1, tries=0 ) G = watts_strogatz_graph(10, 4, 0.25, seed) assert len(G) == 10 assert G.number_of_edges() == 20 G = newman_watts_strogatz_graph(10, 2, 0.0, seed) assert len(G) == 10 assert G.number_of_edges() == 10 G = newman_watts_strogatz_graph(10, 4, 0.25, seed) assert len(G) == 10 assert G.number_of_edges() >= 20 G = barabasi_albert_graph(100, 1, seed) G = barabasi_albert_graph(100, 3, seed) assert G.number_of_edges() == (97 * 3) G = extended_barabasi_albert_graph(100, 1, 0, 0, seed) assert G.number_of_edges() == 99 G = extended_barabasi_albert_graph(100, 3, 0, 0, seed) assert G.number_of_edges() == 97 * 3 G = extended_barabasi_albert_graph(100, 1, 0, 0.5, seed) assert G.number_of_edges() == 99 G = extended_barabasi_albert_graph(100, 2, 0.5, 0, seed) assert G.number_of_edges() > 100 * 3 # FIXME(@acezen): the assertion failed. # assert G.number_of_edges() < 100 * 4 G = extended_barabasi_albert_graph(100, 2, 0.3, 0.3, seed) assert G.number_of_edges() > 100 * 2 # FIXME(@acezen): the assertion failed. 
# assert G.number_of_edges() < 100 * 4 G = powerlaw_cluster_graph(100, 1, 1.0, seed) G = powerlaw_cluster_graph(100, 3, 0.0, seed) assert G.number_of_edges() == (97 * 3) G = random_regular_graph(10, 20, seed) pytest.raises(NetworkXError, random_regular_graph, 3, 21) pytest.raises(NetworkXError, random_regular_graph, 33, 21) constructor = [(10, 20, 0.8), (20, 40, 0.8)] G = random_shell_graph(constructor, seed) def is_caterpillar(g): """ A tree is a caterpillar iff all nodes of degree >=3 are surrounded by at most two nodes of degree two or greater. ref: http://mathworld.wolfram.com/CaterpillarGraph.html """ deg_over_3 = [n for n in g if g.degree(n) >= 3] for n in deg_over_3: nbh_deg_over_2 = [nbh for nbh in g.neighbors(n) if g.degree(nbh) >= 2] if not len(nbh_deg_over_2) <= 2: return False return True def is_lobster(g): """ A tree is a lobster if it has the property that the removal of leaf nodes leaves a caterpillar graph (Gallian 2007) ref: http://mathworld.wolfram.com/LobsterGraph.html """ non_leafs = [n for n in g if g.degree(n) > 1] return is_caterpillar(g.subgraph(non_leafs)) G = random_lobster(10, 0.1, 0.5, seed) # FIXME(@acezen): the assertion failed. # assert max([G.degree(n) for n in G.nodes()]) > 3
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "graphscope.nx.generators.random_graphs.binomial_graph", "graphscope.nx.generators.random_graphs.newman_watts_strogatz_graph", "graphscope.nx.generators.random_graphs.fast_gnp_random_graph", "graphscope.nx.generators.random_graphs.random_shell_graph", "graphscope.nx.generators.random_graphs.random_lobster", "graphscope.nx.generators.random_graphs.gnm_random_graph", "graphscope.nx.generators.random_graphs.random_regular_graph", "pytest.raises", "graphscope.nx.generators.random_graphs.watts_strogatz_graph", "graphscope.nx.generators.random_graphs.gnp_random_graph", "graphscope.nx.generators.random_graphs.barabasi_albert_graph", "graphscope.nx.generators.random_graphs.powerlaw_cluster_graph", "pytest.mark.usefixtures", "graphscope.nx.generators.random_graphs.erdos_renyi_graph", "graphscope.nx.generators.random_graphs.extended_barabasi_albert_graph", "graphscope.nx.generators.random_graphs.dense_gnm_random_graph", "graphscope.nx.generators.random_graphs.connected_watts_strogatz_graph" ]
[((1673, 1718), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1696, 1718), False, 'import pytest\n'), ((1720, 1768), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGeneratorsRandom'], {}), '(TestGeneratorsRandom)\n', (1746, 1768), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1860, 1893), 'graphscope.nx.generators.random_graphs.gnp_random_graph', 'gnp_random_graph', (['(100)', '(0.25)', 'seed'], {}), '(100, 0.25, seed)\n', (1876, 1893), False, 'from graphscope.nx.generators.random_graphs import gnp_random_graph\n'), ((1906, 1954), 'graphscope.nx.generators.random_graphs.gnp_random_graph', 'gnp_random_graph', (['(100)', '(0.25)', 'seed'], {'directed': '(True)'}), '(100, 0.25, seed, directed=True)\n', (1922, 1954), False, 'from graphscope.nx.generators.random_graphs import gnp_random_graph\n'), ((1967, 1998), 'graphscope.nx.generators.random_graphs.binomial_graph', 'binomial_graph', (['(100)', '(0.25)', 'seed'], {}), '(100, 0.25, seed)\n', (1981, 1998), False, 'from graphscope.nx.generators.random_graphs import binomial_graph\n'), ((2011, 2045), 'graphscope.nx.generators.random_graphs.erdos_renyi_graph', 'erdos_renyi_graph', (['(100)', '(0.25)', 'seed'], {}), '(100, 0.25, seed)\n', (2028, 2045), False, 'from graphscope.nx.generators.random_graphs import erdos_renyi_graph\n'), ((2058, 2096), 'graphscope.nx.generators.random_graphs.fast_gnp_random_graph', 'fast_gnp_random_graph', (['(100)', '(0.25)', 'seed'], {}), '(100, 0.25, seed)\n', (2079, 2096), False, 'from graphscope.nx.generators.random_graphs import fast_gnp_random_graph\n'), ((2109, 2162), 'graphscope.nx.generators.random_graphs.fast_gnp_random_graph', 'fast_gnp_random_graph', (['(100)', '(0.25)', 'seed'], {'directed': '(True)'}), '(100, 0.25, seed, directed=True)\n', (2130, 2162), False, 'from graphscope.nx.generators.random_graphs import fast_gnp_random_graph\n'), ((2175, 2206), 'graphscope.nx.generators.random_graphs.gnm_random_graph', 'gnm_random_graph', (['(100)', '(20)', 'seed'], {}), '(100, 20, seed)\n', (2191, 2206), False, 'from graphscope.nx.generators.random_graphs import gnm_random_graph\n'), ((2219, 2265), 'graphscope.nx.generators.random_graphs.gnm_random_graph', 'gnm_random_graph', (['(100)', '(20)', 'seed'], {'directed': '(True)'}), '(100, 20, seed, directed=True)\n', (2235, 2265), False, 'from graphscope.nx.generators.random_graphs import gnm_random_graph\n'), ((2278, 2315), 'graphscope.nx.generators.random_graphs.dense_gnm_random_graph', 'dense_gnm_random_graph', (['(100)', '(20)', 'seed'], {}), '(100, 20, seed)\n', (2300, 2315), False, 'from graphscope.nx.generators.random_graphs import dense_gnm_random_graph\n'), ((2329, 2368), 'graphscope.nx.generators.random_graphs.watts_strogatz_graph', 'watts_strogatz_graph', (['(10)', '(2)', '(0.25)', 'seed'], {}), '(10, 2, 0.25, seed)\n', (2349, 2368), False, 'from graphscope.nx.generators.random_graphs import watts_strogatz_graph\n'), ((2451, 2514), 'graphscope.nx.generators.random_graphs.connected_watts_strogatz_graph', 'connected_watts_strogatz_graph', (['(10)', '(2)', '(0.1)'], {'tries': '(10)', 'seed': 'seed'}), '(10, 2, 0.1, tries=10, seed=seed)\n', (2481, 2514), False, 'from graphscope.nx.generators.random_graphs import connected_watts_strogatz_graph\n'), ((2592, 2677), 'pytest.raises', 'pytest.raises', (['NetworkXError', 'connected_watts_strogatz_graph', '(10)', '(2)', '(0.1)'], {'tries': '(0)'}), '(NetworkXError, 
connected_watts_strogatz_graph, 10, 2, 0.1,\n tries=0)\n', (2605, 2677), False, 'import pytest\n'), ((2709, 2748), 'graphscope.nx.generators.random_graphs.watts_strogatz_graph', 'watts_strogatz_graph', (['(10)', '(4)', '(0.25)', 'seed'], {}), '(10, 4, 0.25, seed)\n', (2729, 2748), False, 'from graphscope.nx.generators.random_graphs import watts_strogatz_graph\n'), ((2831, 2876), 'graphscope.nx.generators.random_graphs.newman_watts_strogatz_graph', 'newman_watts_strogatz_graph', (['(10)', '(2)', '(0.0)', 'seed'], {}), '(10, 2, 0.0, seed)\n', (2858, 2876), False, 'from graphscope.nx.generators.random_graphs import newman_watts_strogatz_graph\n'), ((2959, 3005), 'graphscope.nx.generators.random_graphs.newman_watts_strogatz_graph', 'newman_watts_strogatz_graph', (['(10)', '(4)', '(0.25)', 'seed'], {}), '(10, 4, 0.25, seed)\n', (2986, 3005), False, 'from graphscope.nx.generators.random_graphs import newman_watts_strogatz_graph\n'), ((3088, 3123), 'graphscope.nx.generators.random_graphs.barabasi_albert_graph', 'barabasi_albert_graph', (['(100)', '(1)', 'seed'], {}), '(100, 1, seed)\n', (3109, 3123), False, 'from graphscope.nx.generators.random_graphs import barabasi_albert_graph\n'), ((3136, 3171), 'graphscope.nx.generators.random_graphs.barabasi_albert_graph', 'barabasi_albert_graph', (['(100)', '(3)', 'seed'], {}), '(100, 3, seed)\n', (3157, 3171), False, 'from graphscope.nx.generators.random_graphs import barabasi_albert_graph\n'), ((3232, 3282), 'graphscope.nx.generators.random_graphs.extended_barabasi_albert_graph', 'extended_barabasi_albert_graph', (['(100)', '(1)', '(0)', '(0)', 'seed'], {}), '(100, 1, 0, 0, seed)\n', (3262, 3282), False, 'from graphscope.nx.generators.random_graphs import extended_barabasi_albert_graph\n'), ((3336, 3386), 'graphscope.nx.generators.random_graphs.extended_barabasi_albert_graph', 'extended_barabasi_albert_graph', (['(100)', '(3)', '(0)', '(0)', 'seed'], {}), '(100, 3, 0, 0, seed)\n', (3366, 3386), False, 'from graphscope.nx.generators.random_graphs import extended_barabasi_albert_graph\n'), ((3444, 3496), 'graphscope.nx.generators.random_graphs.extended_barabasi_albert_graph', 'extended_barabasi_albert_graph', (['(100)', '(1)', '(0)', '(0.5)', 'seed'], {}), '(100, 1, 0, 0.5, seed)\n', (3474, 3496), False, 'from graphscope.nx.generators.random_graphs import extended_barabasi_albert_graph\n'), ((3550, 3602), 'graphscope.nx.generators.random_graphs.extended_barabasi_albert_graph', 'extended_barabasi_albert_graph', (['(100)', '(2)', '(0.5)', '(0)', 'seed'], {}), '(100, 2, 0.5, 0, seed)\n', (3580, 3602), False, 'from graphscope.nx.generators.random_graphs import extended_barabasi_albert_graph\n'), ((3756, 3810), 'graphscope.nx.generators.random_graphs.extended_barabasi_albert_graph', 'extended_barabasi_albert_graph', (['(100)', '(2)', '(0.3)', '(0.3)', 'seed'], {}), '(100, 2, 0.3, 0.3, seed)\n', (3786, 3810), False, 'from graphscope.nx.generators.random_graphs import extended_barabasi_albert_graph\n'), ((3964, 4005), 'graphscope.nx.generators.random_graphs.powerlaw_cluster_graph', 'powerlaw_cluster_graph', (['(100)', '(1)', '(1.0)', 'seed'], {}), '(100, 1, 1.0, seed)\n', (3986, 4005), False, 'from graphscope.nx.generators.random_graphs import powerlaw_cluster_graph\n'), ((4018, 4059), 'graphscope.nx.generators.random_graphs.powerlaw_cluster_graph', 'powerlaw_cluster_graph', (['(100)', '(3)', '(0.0)', 'seed'], {}), '(100, 3, 0.0, seed)\n', (4040, 4059), False, 'from graphscope.nx.generators.random_graphs import powerlaw_cluster_graph\n'), ((4120, 4154), 
'graphscope.nx.generators.random_graphs.random_regular_graph', 'random_regular_graph', (['(10)', '(20)', 'seed'], {}), '(10, 20, seed)\n', (4140, 4154), False, 'from graphscope.nx.generators.random_graphs import random_regular_graph\n'), ((4164, 4221), 'pytest.raises', 'pytest.raises', (['NetworkXError', 'random_regular_graph', '(3)', '(21)'], {}), '(NetworkXError, random_regular_graph, 3, 21)\n', (4177, 4221), False, 'import pytest\n'), ((4230, 4288), 'pytest.raises', 'pytest.raises', (['NetworkXError', 'random_regular_graph', '(33)', '(21)'], {}), '(NetworkXError, random_regular_graph, 33, 21)\n', (4243, 4288), False, 'import pytest\n'), ((4355, 4392), 'graphscope.nx.generators.random_graphs.random_shell_graph', 'random_shell_graph', (['constructor', 'seed'], {}), '(constructor, seed)\n', (4373, 4392), False, 'from graphscope.nx.generators.random_graphs import random_shell_graph\n'), ((5341, 5375), 'graphscope.nx.generators.random_graphs.random_lobster', 'random_lobster', (['(10)', '(0.1)', '(0.5)', 'seed'], {}), '(10, 0.1, 0.5, seed)\n', (5355, 5375), False, 'from graphscope.nx.generators.random_graphs import random_lobster\n')]
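For quick reference, the generators exercised in the test row above take the same arguments as their NetworkX counterparts (n, m or p, seed, directed/create_using). A minimal sketch, assuming an active GraphScope session such as the one the graphscope_session fixture provides in these tests:

# Minimal sketch; assumes an active GraphScope session, as in the tests above.
from graphscope.nx.generators.random_graphs import gnm_random_graph
from graphscope.nx.generators.random_graphs import watts_strogatz_graph

G = gnm_random_graph(100, 20, seed=42)
assert G.number_of_nodes() == 100 and G.number_of_edges() == 20

H = watts_strogatz_graph(10, 2, 0.25, seed=42)   # ring of 10 nodes, each of degree 2
assert H.number_of_edges() == 10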
import pytest import graphscope.nx as nx @pytest.mark.usefixtures("graphscope_session") class TestAsteroidal: def test_is_at_free(self): is_at_free = nx.asteroidal.is_at_free cycle = nx.cycle_graph(6) assert not is_at_free(cycle) path = nx.path_graph(6) assert is_at_free(path) small_graph = nx.complete_graph(2) assert is_at_free(small_graph) petersen = nx.petersen_graph() assert not is_at_free(petersen) clique = nx.complete_graph(6) assert is_at_free(clique)
[ "graphscope.nx.cycle_graph", "graphscope.nx.complete_graph", "pytest.mark.usefixtures", "graphscope.nx.path_graph", "graphscope.nx.petersen_graph" ]
[((45, 90), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (68, 90), False, 'import pytest\n'), ((207, 224), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(6)'], {}), '(6)\n', (221, 224), True, 'import graphscope.nx as nx\n'), ((278, 294), 'graphscope.nx.path_graph', 'nx.path_graph', (['(6)'], {}), '(6)\n', (291, 294), True, 'import graphscope.nx as nx\n'), ((350, 370), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(2)'], {}), '(2)\n', (367, 370), True, 'import graphscope.nx as nx\n'), ((430, 449), 'graphscope.nx.petersen_graph', 'nx.petersen_graph', ([], {}), '()\n', (447, 449), True, 'import graphscope.nx as nx\n'), ((508, 528), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(6)'], {}), '(6)\n', (525, 528), True, 'import graphscope.nx as nx\n')]
import networkx.algorithms.shortest_paths.tests.test_dense
import pytest

import graphscope.nx as nx
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(networkx.algorithms.shortest_paths.tests.test_dense,
                        decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.shortest_paths.tests.test_dense import TestFloyd


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestFloyd)
class TestFloyd:
    def test_zero_weight(self):
        G = nx.DiGraph()
        edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)]
        G.add_weighted_edges_from(edges)
        dist = nx.floyd_warshall(G)
        assert dist[1][3] == -14
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((441, 486), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (464, 486), False, 'import pytest\n'), ((488, 525), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestFloyd'], {}), '(TestFloyd)\n', (514, 525), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((316, 361), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (339, 361), False, 'import pytest\n')]
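This row and several of the rows that follow reuse the same compatibility pattern: an upstream NetworkX test module (or class) is imported and re-decorated so that its cases run against graphscope.nx. Below is a hedged, hypothetical sketch of that pattern; the upstream module and the extra test method are chosen only for illustration.

# Hypothetical sketch of the import_as_graphscope_nx / with_graphscope_nx_context
# pattern used throughout these rows; module and test names are illustrative.
import pytest
import networkx.classes.tests.test_graph  # upstream test module, chosen for illustration
from networkx.classes.tests.test_graph import TestGraph as UpstreamTestGraph

import graphscope.nx as gs_nx
from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

# Re-export the upstream module's tests with `nx` rebound to graphscope.nx.
import_as_graphscope_nx(networkx.classes.tests.test_graph,
                        decorators=pytest.mark.usefixtures("graphscope_session"))


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(UpstreamTestGraph)
class TestGraph:
    # Locally defined methods can extend or override the inherited upstream cases.
    def test_extra_path_graph(self):
        G = gs_nx.path_graph(3)
        assert G.number_of_edges() == 2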
#!/usr/bin/env python # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # """Unit tests for the :mod:`networkx.algorithms.boundary` module.""" import pytest from networkx.testing import assert_edges_equal from graphscope import nx @pytest.mark.usefixtures("graphscope_session") class TestNodeBoundary: """Unit tests for the :func:`~networkx.builtin.node_boundary` function.""" def test_path_graph(self): P10 = nx.path_graph(10) assert nx.builtin.node_boundary(P10, [0, 1, 2]) == [3] assert nx.builtin.node_boundary(P10, [3, 4, 5]) == [2, 6] assert nx.builtin.node_boundary(P10, [2, 3, 4, 5, 6]) == [1, 7] assert nx.builtin.node_boundary(P10, [7, 8, 9]) == [6] def test_complete_graph(self): K10 = nx.complete_graph(10) assert sorted(nx.builtin.node_boundary(K10, [0, 1, 2])) == [3, 4, 5, 6, 7, 8, 9] assert sorted(nx.builtin.node_boundary(K10, [3, 4, 5])) == [0, 1, 2, 6, 7, 8, 9] assert sorted(nx.builtin.node_boundary(K10, [2, 3, 4, 5, 6])) == [0, 1, 7, 8, 9] assert nx.builtin.node_boundary(K10, [0, 1, 2], [2, 3, 4]) == [4, 3] def test_directed(self): """Tests the node boundary of a directed graph.""" G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]) S = [0, 1] boundary = nx.builtin.node_boundary(G, S) expected = [2] assert boundary == expected @pytest.mark.usefixtures("graphscope_session") class TestEdgeBoundary: """Unit tests for the :func:`~networkx.builtin.edge_boundary` function.""" def test_path_graph(self): P10 = nx.path_graph(10) assert nx.builtin.edge_boundary(P10, [0, 1, 2]) == [[2, 3]] assert sorted(nx.builtin.edge_boundary(P10, [3, 4, 5])) == [[3, 2], [5, 6]] assert sorted(nx.builtin.edge_boundary(P10, [2, 3, 4, 5, 6])) == [ [2, 1], [6, 7], ] assert nx.builtin.edge_boundary(P10, [7, 8, 9]) == [[7, 6]] assert sorted(nx.builtin.edge_boundary(P10, [0, 1, 2], [2, 3, 4])) == [ [1, 2], [2, 3], ] def test_complete_graph(self): K10 = nx.complete_graph(10) def ilen(iterable): return sum(1 for i in iterable) assert ilen(nx.builtin.edge_boundary(K10, [0, 1, 2])) == 21 assert ilen(nx.builtin.edge_boundary(K10, [3, 4, 5, 6])) == 24 assert ilen(nx.builtin.edge_boundary(K10, [2, 3, 4, 5, 6])) == 25 assert ilen(nx.builtin.edge_boundary(K10, [7, 8, 9])) == 21 assert_edges_equal( nx.builtin.edge_boundary(K10, [3, 4, 5], [8, 9]), [[3, 8], [3, 9], [4, 8], [4, 9], [5, 8], [5, 9]], ) assert_edges_equal( nx.builtin.edge_boundary(K10, [0, 1, 2], [2, 3, 4]), [[0, 2], [0, 3], [0, 4], [1, 2], [1, 3], [1, 4], [2, 3], [2, 4]], ) def test_directed(self): """Tests the edge boundary of a directed graph.""" G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]) S = [0, 1] boundary = nx.builtin.edge_boundary(G, S) expected = [[1, 2]] assert boundary == expected
[ "graphscope.nx.builtin.edge_boundary", "graphscope.nx.complete_graph", "pytest.mark.usefixtures", "graphscope.nx.DiGraph", "graphscope.nx.builtin.node_boundary", "graphscope.nx.path_graph" ]
[((537, 582), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (560, 582), False, 'import pytest\n'), ((1715, 1760), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1738, 1760), False, 'import pytest\n'), ((732, 749), 'graphscope.nx.path_graph', 'nx.path_graph', (['(10)'], {}), '(10)\n', (745, 749), False, 'from graphscope import nx\n'), ((1064, 1085), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(10)'], {}), '(10)\n', (1081, 1085), False, 'from graphscope import nx\n'), ((1531, 1583), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['[(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]'], {}), '([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])\n', (1541, 1583), False, 'from graphscope import nx\n'), ((1622, 1652), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['G', 'S'], {}), '(G, S)\n', (1646, 1652), False, 'from graphscope import nx\n'), ((1910, 1927), 'graphscope.nx.path_graph', 'nx.path_graph', (['(10)'], {}), '(10)\n', (1923, 1927), False, 'from graphscope import nx\n'), ((2453, 2474), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(10)'], {}), '(10)\n', (2470, 2474), False, 'from graphscope import nx\n'), ((3274, 3326), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['[(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]'], {}), '([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])\n', (3284, 3326), False, 'from graphscope import nx\n'), ((3365, 3395), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['G', 'S'], {}), '(G, S)\n', (3389, 3395), False, 'from graphscope import nx\n'), ((765, 805), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['P10', '[0, 1, 2]'], {}), '(P10, [0, 1, 2])\n', (789, 805), False, 'from graphscope import nx\n'), ((828, 868), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['P10', '[3, 4, 5]'], {}), '(P10, [3, 4, 5])\n', (852, 868), False, 'from graphscope import nx\n'), ((894, 940), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['P10', '[2, 3, 4, 5, 6]'], {}), '(P10, [2, 3, 4, 5, 6])\n', (918, 940), False, 'from graphscope import nx\n'), ((966, 1006), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['P10', '[7, 8, 9]'], {}), '(P10, [7, 8, 9])\n', (990, 1006), False, 'from graphscope import nx\n'), ((1368, 1419), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['K10', '[0, 1, 2]', '[2, 3, 4]'], {}), '(K10, [0, 1, 2], [2, 3, 4])\n', (1392, 1419), False, 'from graphscope import nx\n'), ((1943, 1983), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['P10', '[0, 1, 2]'], {}), '(P10, [0, 1, 2])\n', (1967, 1983), False, 'from graphscope import nx\n'), ((2220, 2260), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['P10', '[7, 8, 9]'], {}), '(P10, [7, 8, 9])\n', (2244, 2260), False, 'from graphscope import nx\n'), ((2870, 2918), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['K10', '[3, 4, 5]', '[8, 9]'], {}), '(K10, [3, 4, 5], [8, 9])\n', (2894, 2918), False, 'from graphscope import nx\n'), ((3032, 3083), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['K10', '[0, 1, 2]', '[2, 3, 4]'], {}), '(K10, [0, 1, 2], [2, 3, 4])\n', (3056, 3083), False, 'from graphscope import nx\n'), ((1108, 1148), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['K10', '[0, 1, 2]'], {}), '(K10, [0, 1, 2])\n', (1132, 1148), False, 'from 
graphscope import nx\n'), ((1197, 1237), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['K10', '[3, 4, 5]'], {}), '(K10, [3, 4, 5])\n', (1221, 1237), False, 'from graphscope import nx\n'), ((1286, 1332), 'graphscope.nx.builtin.node_boundary', 'nx.builtin.node_boundary', (['K10', '[2, 3, 4, 5, 6]'], {}), '(K10, [2, 3, 4, 5, 6])\n', (1310, 1332), False, 'from graphscope import nx\n'), ((2018, 2058), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['P10', '[3, 4, 5]'], {}), '(P10, [3, 4, 5])\n', (2042, 2058), False, 'from graphscope import nx\n'), ((2102, 2148), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['P10', '[2, 3, 4, 5, 6]'], {}), '(P10, [2, 3, 4, 5, 6])\n', (2126, 2148), False, 'from graphscope import nx\n'), ((2295, 2346), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['P10', '[0, 1, 2]', '[2, 3, 4]'], {}), '(P10, [0, 1, 2], [2, 3, 4])\n', (2319, 2346), False, 'from graphscope import nx\n'), ((2569, 2609), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['K10', '[0, 1, 2]'], {}), '(K10, [0, 1, 2])\n', (2593, 2609), False, 'from graphscope import nx\n'), ((2637, 2680), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['K10', '[3, 4, 5, 6]'], {}), '(K10, [3, 4, 5, 6])\n', (2661, 2680), False, 'from graphscope import nx\n'), ((2708, 2754), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['K10', '[2, 3, 4, 5, 6]'], {}), '(K10, [2, 3, 4, 5, 6])\n', (2732, 2754), False, 'from graphscope import nx\n'), ((2782, 2822), 'graphscope.nx.builtin.edge_boundary', 'nx.builtin.edge_boundary', (['K10', '[7, 8, 9]'], {}), '(K10, [7, 8, 9])\n', (2806, 2822), False, 'from graphscope import nx\n')]
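One detail worth noting from the boundary tests above: the graphscope.nx builtin variants return plain lists (and lists of [u, v] pairs) where NetworkX returns sets or iterators, which is why the assertions compare against list literals. A short sketch of that shape, session assumed as in the tests:

# Sketch of the list-shaped results produced by the builtin boundary helpers above.
from graphscope import nx

P = nx.path_graph(10)
assert nx.builtin.node_boundary(P, [0, 1, 2]) == [3]       # list, not set
assert nx.builtin.edge_boundary(P, [0, 1, 2]) == [[2, 3]]  # list of [u, v] pairs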
# # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pickle import pytest from networkx.classes.tests.test_graph import TestEdgeSubgraph as _TestEdgeSubgraph from networkx.classes.tests.test_graph import TestGraph as _TestGraph from graphscope.experimental import nx from graphscope.experimental.nx.tests.utils import assert_nodes_equal @pytest.mark.usefixtures("graphscope_session") class TestGraph(_TestGraph): def setup_method(self): self.Graph = nx.Graph self.k3nodes = [0, 1, 2] self.k3edges = [(0, 1), (0, 2), (1, 2)] self.K3 = self.Graph() self.K3.update(self.k3edges, self.k3nodes) def graphs_equal(self, H, G): assert G.adj == H.adj assert G.nodes == H.nodes assert G.graph == H.graph assert G.name == H.name assert G.adj == H.adj if G.is_directed() and H.is_directed(): assert G.pred == H.pred assert G.succ == H.succ def shallow_copy_graph_attr(self, H, G): assert G.graph["foo"] == H.graph["foo"] G.graph["foo"] = "new_foo" assert G.graph["foo"] == H.graph["foo"] def shallow_copy_node_attr(self, H, G): assert G.nodes[0]["foo"] == H.nodes[0]["foo"] G.nodes[0]["foo"] = "new_foo" assert G.nodes[0]["foo"] == H.nodes[0]["foo"] def shallow_copy_edge_attr(self, H, G): assert G[1][2]["foo"] == H[1][2]["foo"] G[1][2]["foo"] = "new_foo" assert G[1][2]["foo"] == H[1][2]["foo"] def add_attributes(self, G): G.graph["foo"] = "foo" G.nodes[0]["foo"] = "foo" G.remove_edge(1, 2) ll = "ll" G.add_edge(1, 2, foo=ll) G.add_edge(2, 1, foo=ll) def test_memory_leak(self): pass def test_pickle(self): pass @pytest.mark.skip(reason="not support to_undirected not as view") def test_to_undirected(self): pass @pytest.mark.skip(reason="not support to_directed not as view") def test_to_directed(self): pass @pytest.mark.skip(reason="not support clear_edges in Graph yet.") def test_clear_edges(self): pass def test_graph_chain(self): # subgraph now is fallback with networkx, not view G = self.Graph([(0, 1), (1, 2)]) DG = G.to_directed(as_view=True) RDG = DG.reverse(copy=False) assert G is DG._graph assert DG is RDG._graph def test_copy(self): G = self.Graph() G.add_node(0) G.add_edge(1, 2) self.add_attributes(G) # deep copy H = G.copy() self.graphs_equal(H, G) def test_class_copy(self): G = self.Graph() G.add_node(0) G.add_edge(1, 2) self.add_attributes(G) # deep copy H = G.__class__(G) self.graphs_equal(H, G) def test_subgraph(self): # subgraph now is fallback with networkx, not view G = self.K3 self.add_attributes(G) H = G.subgraph([0, 1, 2, 5]) self.graphs_equal(H, G) class TestEdgeSubgraph(_TestEdgeSubgraph): def setup_method(self): # Create a path graph on five nodes. G = nx.path_graph(5) # Add some node, edge, and graph attributes. for i in range(5): G.nodes[i]["name"] = "node{}".format(i) G.edges[0, 1]["name"] = "edge01" G.edges[3, 4]["name"] = "edge34" G.graph["name"] = "graph" # Get the subgraph induced by the first and last edges. 
self.G = G self.H = G.edge_subgraph([(0, 1), (3, 4)]) @pytest.mark.skip(reason="edge_subgraph now is fallback with networkx, not view") def test_node_attr_dict(self): pass @pytest.mark.skip(reason="edge_subgraph now is fallback with networkx, not view") def test_edge_attr_dict(self): pass @pytest.mark.skip(reason="edge_subgraph now is fallback with networkx, not view") def test_graph_attr_dict(self): pass @pytest.mark.skip(reason="edge_subgraph now is fallback with networkx, not view") def test_remove_node(self): pass
[ "pytest.mark.skip", "pytest.mark.usefixtures", "graphscope.experimental.nx.path_graph" ]
[((648, 693), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (671, 693), False, 'import pytest\n'), ((2107, 2171), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support to_undirected not as view"""'}), "(reason='not support to_undirected not as view')\n", (2123, 2171), False, 'import pytest\n'), ((2225, 2287), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support to_directed not as view"""'}), "(reason='not support to_directed not as view')\n", (2241, 2287), False, 'import pytest\n'), ((2339, 2403), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support clear_edges in Graph yet."""'}), "(reason='not support clear_edges in Graph yet.')\n", (2355, 2403), False, 'import pytest\n'), ((3882, 3967), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""edge_subgraph now is fallback with networkx, not view"""'}), "(reason='edge_subgraph now is fallback with networkx, not view'\n )\n", (3898, 3967), False, 'import pytest\n'), ((4017, 4102), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""edge_subgraph now is fallback with networkx, not view"""'}), "(reason='edge_subgraph now is fallback with networkx, not view'\n )\n", (4033, 4102), False, 'import pytest\n'), ((4152, 4237), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""edge_subgraph now is fallback with networkx, not view"""'}), "(reason='edge_subgraph now is fallback with networkx, not view'\n )\n", (4168, 4237), False, 'import pytest\n'), ((4288, 4373), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""edge_subgraph now is fallback with networkx, not view"""'}), "(reason='edge_subgraph now is fallback with networkx, not view'\n )\n", (4304, 4373), False, 'import pytest\n'), ((3477, 3493), 'graphscope.experimental.nx.path_graph', 'nx.path_graph', (['(5)'], {}), '(5)\n', (3490, 3493), False, 'from graphscope.experimental import nx\n')]
""" ==================== Generators - Classic ==================== Unit tests for various classic graph generators in generators/classic.py """ import pytest from networkx.generators.tests.test_classic import TestGeneratorClassic import graphscope.nx as nx from graphscope.nx import is_isomorphic from graphscope.nx.tests.utils import assert_edges_equal from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGeneratorClassic) class TestGeneratorClassic: @pytest.mark.skip(reason="FIXME: test take too much time.") def test_dorogovtsev_goltsev_mendes_graph(self): pass def test_ladder_graph(self): for i, G in [ (0, nx.empty_graph(0)), (1, nx.path_graph(2)), ]: assert is_isomorphic(nx.ladder_graph(i), G) pytest.raises(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph) g = nx.ladder_graph(2) mg = nx.ladder_graph(2, create_using=nx.MultiGraph) assert_edges_equal(mg.edges(), g.edges())
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.raises", "pytest.mark.skip", "graphscope.nx.empty_graph", "graphscope.nx.ladder_graph", "pytest.mark.usefixtures", "graphscope.nx.path_graph" ]
[((425, 470), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (448, 470), False, 'import pytest\n'), ((472, 520), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGeneratorClassic'], {}), '(TestGeneratorClassic)\n', (498, 520), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((554, 612), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME: test take too much time."""'}), "(reason='FIXME: test take too much time.')\n", (570, 612), False, 'import pytest\n'), ((882, 958), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.ladder_graph', '(2)'], {'create_using': 'nx.DiGraph'}), '(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph)\n', (895, 958), False, 'import pytest\n'), ((972, 990), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['(2)'], {}), '(2)\n', (987, 990), True, 'import graphscope.nx as nx\n'), ((1004, 1050), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['(2)'], {'create_using': 'nx.MultiGraph'}), '(2, create_using=nx.MultiGraph)\n', (1019, 1050), True, 'import graphscope.nx as nx\n'), ((751, 768), 'graphscope.nx.empty_graph', 'nx.empty_graph', (['(0)'], {}), '(0)\n', (765, 768), True, 'import graphscope.nx as nx\n'), ((787, 803), 'graphscope.nx.path_graph', 'nx.path_graph', (['(2)'], {}), '(2)\n', (800, 803), True, 'import graphscope.nx as nx\n'), ((850, 868), 'graphscope.nx.ladder_graph', 'nx.ladder_graph', (['i'], {}), '(i)\n', (865, 868), True, 'import graphscope.nx as nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Author: <NAME><<EMAIL>> # import json from graphscope.framework.app import AppAssets from graphscope.framework.app import not_compatible_for from graphscope.framework.app import project_to_simple __all__ = ["is_simple_path"] @project_to_simple @not_compatible_for("arrow_property") def is_simple_path(G, nodes): """Returns True if and only if `nodes` form a simple path in `G`. A *simple path* in a graph is a nonempty sequence of nodes in which no node appears more than once in the sequence, and each adjacent pair of nodes in the sequence is adjacent in the graph. Parameters ---------- nodes : list A list of one or more nodes in the graph `G`. Returns ------- bool Whether the given list of nodes represents a simple path in `G`. Notes ----- An empty list of nodes is not a path but a list of one node is a path. Here's an explanation why. This function operates on *node paths*. One could also consider *edge paths*. There is a bijection between node paths and edge paths. The *length of a path* is the number of edges in the path, so a list of nodes of length *n* corresponds to a path of length *n* - 1. Thus the smallest edge path would be a list of zero edges, the empty path. This corresponds to a list of one node. To convert between a node path and an edge path, you can use code like the following:: >>> from networkx.utils import pairwise >>> nodes = [0, 1, 2, 3] >>> edges = list(pairwise(nodes)) >>> edges [(0, 1), (1, 2), (2, 3)] >>> nodes = [edges[0][0]] + [v for u, v in edges] >>> nodes [0, 1, 2, 3] Examples -------- >>> G = nx.cycle_graph(4) >>> nx.is_simple_path(G, [2, 3, 0]) True >>> nx.is_simple_path(G, [0, 2]) False """ if isinstance(nodes, list): n1json = json.dumps(nodes) ctx = AppAssets(algo="is_simple_path", context="tensor")(G, n1json) return ctx.to_numpy("r", axis=0)[0] raise ValueError("input nodes is not a list object!")
[ "graphscope.framework.app.AppAssets", "json.dumps", "graphscope.framework.app.not_compatible_for" ]
[((918, 954), 'graphscope.framework.app.not_compatible_for', 'not_compatible_for', (['"""arrow_property"""'], {}), "('arrow_property')\n", (936, 954), False, 'from graphscope.framework.app import not_compatible_for\n'), ((2585, 2602), 'json.dumps', 'json.dumps', (['nodes'], {}), '(nodes)\n', (2595, 2602), False, 'import json\n'), ((2617, 2667), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""is_simple_path"""', 'context': '"""tensor"""'}), "(algo='is_simple_path', context='tensor')\n", (2626, 2667), False, 'from graphscope.framework.app import AppAssets\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file reportviews.py is referred and derived from project NetworkX, # # https://github.com/networkx/networkx/blob/master/networkx/classes/reportviews.py # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # from networkx.classes.reportviews import EdgeDataView as _EdgeDataView from networkx.classes.reportviews import EdgeView as _EdgeView from networkx.classes.reportviews import InEdgeDataView as _InEdgeDataView from networkx.classes.reportviews import InEdgeView as _InEdgeView from networkx.classes.reportviews import NodeView as _NodeView from networkx.classes.reportviews import OutEdgeDataView as _OutEdgeDataView from networkx.classes.reportviews import OutEdgeView as _OutEdgeView from graphscope.nx.utils.compat import patch_docstring # NodeViews @patch_docstring(_NodeView) class NodeView(_NodeView): __slots__ = ( "_graph", "_nodes", ) def __getstate__(self): return {"_graph": self._graph, "_nodes": self._nodes} def __setstate__(self, state): self._graph = state["_graph"] self._nodes = state["_nodes"] def __init__(self, graph): self._graph = graph self._nodes = graph._node # Mapping methods def __len__(self): return self._graph.number_of_nodes() def __iter__(self): return iter(self._nodes) # EdgeDataViews @patch_docstring(_OutEdgeDataView) class OutEdgeDataView(_OutEdgeDataView): def __len__(self): if self._nbunch: return sum(len(nbrs) for n, nbrs in self._nodes_nbrs()) return self._viewer._graph.number_of_edges() @patch_docstring(_EdgeDataView) class EdgeDataView(_EdgeDataView): def __len__(self): if self._nbunch: return sum(1 for e in self) return self._viewer._graph.number_of_edges() @patch_docstring(_InEdgeDataView) class InEdgeDataView(_InEdgeDataView): def __len__(self): if self._nbunch: return sum(len(nbrs) for n, nbrs in self._nodes_nbrs()) return self._viewer._graph.number_of_edges() @patch_docstring(_OutEdgeView) class OutEdgeView(_OutEdgeView): dataview = OutEdgeDataView # Set methods def __len__(self): return self._graph.number_of_edges() @patch_docstring(_EdgeView) class EdgeView(_EdgeView): __slots__ = () dataview = EdgeDataView # Set methods def __len__(self): return self._graph.number_of_edges() @patch_docstring(_InEdgeView) class InEdgeView(_InEdgeView): dataview = InEdgeDataView # Set methods def __len__(self): return self._graph.number_of_edges() def __contains__(self, e): return self._graph.has_edge(*e)
[ "graphscope.nx.utils.compat.patch_docstring" ]
[((1058, 1084), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_NodeView'], {}), '(_NodeView)\n', (1073, 1084), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((1637, 1670), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_OutEdgeDataView'], {}), '(_OutEdgeDataView)\n', (1652, 1670), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((1884, 1914), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_EdgeDataView'], {}), '(_EdgeDataView)\n', (1899, 1914), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((2094, 2126), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_InEdgeDataView'], {}), '(_InEdgeDataView)\n', (2109, 2126), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((2338, 2367), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_OutEdgeView'], {}), '(_OutEdgeView)\n', (2353, 2367), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((2523, 2549), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_EdgeView'], {}), '(_EdgeView)\n', (2538, 2549), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((2715, 2743), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_InEdgeView'], {}), '(_InEdgeView)\n', (2730, 2743), False, 'from graphscope.nx.utils.compat import patch_docstring\n')]
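The overrides in the reportviews row above mostly redirect __len__ (and __contains__ for in-edges) to the graph's own counting and lookup primitives instead of iterating materialized adjacency. A small sketch of the user-visible effect, assuming a usable GraphScope session as in the surrounding test rows:

# Sketch: view sizes delegate to number_of_nodes()/number_of_edges() on the backing graph.
import graphscope.nx as nx

G = nx.path_graph(5)
assert len(G.nodes) == G.number_of_nodes() == 5      # NodeView.__len__
assert len(G.edges) == G.number_of_edges() == 4      # EdgeView.__len__

D = nx.DiGraph([(0, 1), (1, 2)])
assert (0, 1) in D.in_edges                          # InEdgeView.__contains__ -> has_edge
assert len(D.in_edges) == D.number_of_edges() == 2   # InEdgeView.__len__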
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import os import pytest from networkx.tests.test_convert_pandas import TestConvertPandas as _TestConvertPandas from networkx.utils import edges_equal from networkx.utils import graphs_equal from networkx.utils import nodes_equal import graphscope.nx as nx from graphscope.nx.utils.compat import with_graphscope_nx_context np = pytest.importorskip("numpy") pd = pytest.importorskip("pandas") @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(_TestConvertPandas) class TestConvertPandas: def test_edgekey_with_multigraph(self): pass def test_edgekey_with_normal_graph_no_action(self): pass def test_nonexisting_edgekey_raises(self): pass def test_from_edgelist_int_attr_name(self): pass @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="num_worker=2: DataFrame.index values are different", ) def test_from_adjacency_named(self): # example from issue #3105 data = { "A": {"A": 0, "B": 0, "C": 0}, "B": {"A": 1, "B": 0, "C": 0}, "C": {"A": 0, "B": 1, "C": 0}, } dftrue = pd.DataFrame(data) df = dftrue[["A", "C", "B"]] G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph()) df = nx.to_pandas_adjacency(G, dtype=np.intp) pd.testing.assert_frame_equal(df, dftrue) @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="num_worker=2: DataFrame.index values are different", ) def test_from_edgelist_multi_attr_incl_target(self): Gtrue = nx.Graph( [ ("E", "C", {"b": "E", "weight": 10}), ("B", "A", {"b": "A", "weight": 7}), ("A", "D", {"b": "D", "weight": 4}), ] ) G = nx.from_pandas_edgelist(self.df, 0, "b", ["b", "weight"]) assert graphs_equal(G, Gtrue) @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="num_worker=2: DataFrame.index values are different", ) def test_from_adjacency(self): nodelist = [1, 2] dftrue = pd.DataFrame( [[1, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist ) G = nx.Graph([(1, 1), (1, 2)]) df = nx.to_pandas_adjacency(G, dtype=int) pd.testing.assert_frame_equal(df, dftrue) def test_from_edgelist(self): # Pandas DataFrame G = nx.cycle_graph(10) G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) edgelist = nx.to_edgelist(G) source = [] target = [] weight = [] # N.B the iterate order of edgelist may not all the same for s, t, d in edgelist: source.append(s) target.append(t) weight.append(d["weight"]) edges = pd.DataFrame({"source": source, "target": target, "weight": weight}) GG = nx.from_pandas_edgelist(edges, edge_attr="weight") assert nodes_equal(G.nodes(), GG.nodes()) assert edges_equal(G.edges(), GG.edges()) GW = nx.to_networkx_graph(edges, create_using=nx.Graph) assert nodes_equal(G.nodes(), GW.nodes()) assert edges_equal(G.edges(), GW.edges())
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "networkx.utils.graphs_equal", "graphscope.nx.to_edgelist", "graphscope.nx.from_pandas_edgelist", "graphscope.nx.to_networkx_graph", "graphscope.nx.Graph", "graphscope.nx.cycle_graph", "pytest.mark.usefixtures", "graphscope.nx.DiGraph", "graphscope.nx.to_pandas_adjacency", "os.environ.get", "pytest.importorskip" ]
[((730, 758), 'pytest.importorskip', 'pytest.importorskip', (['"""numpy"""'], {}), "('numpy')\n", (749, 758), False, 'import pytest\n'), ((764, 793), 'pytest.importorskip', 'pytest.importorskip', (['"""pandas"""'], {}), "('pandas')\n", (783, 793), False, 'import pytest\n'), ((797, 842), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (820, 842), False, 'import pytest\n'), ((844, 890), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['_TestConvertPandas'], {}), '(_TestConvertPandas)\n', (870, 890), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((1713, 1753), 'graphscope.nx.to_pandas_adjacency', 'nx.to_pandas_adjacency', (['G'], {'dtype': 'np.intp'}), '(G, dtype=np.intp)\n', (1735, 1753), True, 'import graphscope.nx as nx\n'), ((2038, 2164), 'graphscope.nx.Graph', 'nx.Graph', (["[('E', 'C', {'b': 'E', 'weight': 10}), ('B', 'A', {'b': 'A', 'weight': 7}),\n ('A', 'D', {'b': 'D', 'weight': 4})]"], {}), "([('E', 'C', {'b': 'E', 'weight': 10}), ('B', 'A', {'b': 'A',\n 'weight': 7}), ('A', 'D', {'b': 'D', 'weight': 4})])\n", (2046, 2164), True, 'import graphscope.nx as nx\n'), ((2258, 2315), 'graphscope.nx.from_pandas_edgelist', 'nx.from_pandas_edgelist', (['self.df', '(0)', '"""b"""', "['b', 'weight']"], {}), "(self.df, 0, 'b', ['b', 'weight'])\n", (2281, 2315), True, 'import graphscope.nx as nx\n'), ((2331, 2353), 'networkx.utils.graphs_equal', 'graphs_equal', (['G', 'Gtrue'], {}), '(G, Gtrue)\n', (2343, 2353), False, 'from networkx.utils import graphs_equal\n'), ((2703, 2729), 'graphscope.nx.Graph', 'nx.Graph', (['[(1, 1), (1, 2)]'], {}), '([(1, 1), (1, 2)])\n', (2711, 2729), True, 'import graphscope.nx as nx\n'), ((2743, 2779), 'graphscope.nx.to_pandas_adjacency', 'nx.to_pandas_adjacency', (['G'], {'dtype': 'int'}), '(G, dtype=int)\n', (2765, 2779), True, 'import graphscope.nx as nx\n'), ((2904, 2922), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(10)'], {}), '(10)\n', (2918, 2922), True, 'import graphscope.nx as nx\n'), ((3014, 3031), 'graphscope.nx.to_edgelist', 'nx.to_edgelist', (['G'], {}), '(G)\n', (3028, 3031), True, 'import graphscope.nx as nx\n'), ((3386, 3436), 'graphscope.nx.from_pandas_edgelist', 'nx.from_pandas_edgelist', (['edges'], {'edge_attr': '"""weight"""'}), "(edges, edge_attr='weight')\n", (3409, 3436), True, 'import graphscope.nx as nx\n'), ((3550, 3600), 'graphscope.nx.to_networkx_graph', 'nx.to_networkx_graph', (['edges'], {'create_using': 'nx.Graph'}), '(edges, create_using=nx.Graph)\n', (3570, 3600), True, 'import graphscope.nx as nx\n'), ((1200, 1234), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (1214, 1234), False, 'import os\n'), ((1838, 1872), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (1852, 1872), False, 'import os\n'), ((2388, 2422), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (2402, 2422), False, 'import os\n'), ((1686, 1698), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (1696, 1698), True, 'import graphscope.nx as nx\n')]
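For reference, the pandas round-trip exercised by the tests above can be reproduced directly. This sketch only uses converter calls that appear in the row and assumes a usable GraphScope session:

# Round-trip sketch using the converters exercised above; session assumed.
import pandas as pd

import graphscope.nx as nx

edges = pd.DataFrame(
    {"source": [0, 1, 2], "target": [1, 2, 3], "weight": [1.0, 2.0, 3.0]}
)
G = nx.from_pandas_edgelist(edges, edge_attr="weight")   # DataFrame -> graph
assert G.number_of_edges() == 3

adj = nx.to_pandas_adjacency(G, dtype=float)               # graph -> adjacency DataFrame
assert adj.loc[0, 1] == 1.0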
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import networkx as nx import numpy as np import pytest import graphscope from graphscope import bfs from graphscope import cdlp from graphscope import clustering from graphscope import degree_centrality from graphscope import eigenvector_centrality from graphscope import hits from graphscope import is_simple_path from graphscope import k_core from graphscope import k_shell from graphscope import katz_centrality from graphscope import louvain from graphscope import lpa from graphscope import pagerank from graphscope import property_sssp from graphscope import sssp from graphscope import triangles from graphscope import wcc from graphscope.framework.app import AppAssets from graphscope.framework.errors import InvalidArgumentError def test_create_app(): # builtin-ldbc compatible graph: arrow_projected dynamic_projected # builtin-property compatible graph: arrow_property, append_only # builtin-property app on property graph a1 = AppAssets(algo="property_sssp", context="labeled_vertex_data") # builtin app on arrow projected graph a2 = AppAssets(algo="sssp", context="vertex_data") # on dynamic projected graph a3 = AppAssets(algo="sssp_has_path", context="tensor") def test_compatible_with_dynamic_graph(dynamic_property_graph): # bfs with pytest.raises( InvalidArgumentError, match="Not compatible for arrow_property dynamic_property type", ): bfs(dynamic_property_graph, src=4) def test_run_app_on_directed_graph( p2p_project_directed_graph, sssp_result, pagerank_result, hits_result, bfs_result, clustering_result, dc_result, ev_result, katz_result, ): # sssp ctx1 = sssp(p2p_project_directed_graph, src=6) r1 = ( ctx1.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) r1[r1 == 1.7976931348623157e308] = float("inf") # replace limit::max with inf assert np.allclose(r1, sssp_result["directed"]) ctx2 = sssp(p2p_project_directed_graph, 6) r2 = ( ctx2.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) r2[r2 == 1.7976931348623157e308] = float("inf") # replace limit::max with inf assert np.allclose(r2, sssp_result["directed"]) assert np.allclose( ctx2.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy(), [[1.0, 260.0], [2.0, 229.0], [3.0, 310.0]], ) assert np.allclose( sorted(ctx1.to_numpy("r", vertex_range={"begin": 1, "end": 4})), sorted([260.0, 229.0, 310.0]), ) r3 = sssp(p2p_project_directed_graph, 100000000) assert r3 is not None # pagerank ctx_pr = pagerank(p2p_project_directed_graph, delta=0.85, max_round=10) ret_pr = ( ctx_pr.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(ret_pr, pagerank_result["directed"]) # hits ctx_hits = hits(p2p_project_directed_graph, tolerance=0.001) ret_hub = ( ctx_hits.to_dataframe({"node": "v.id", "hub": 
"r.hub"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) ret_auth = ( ctx_hits.to_dataframe({"node": "v.id", "auth": "r.auth"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(ret_hub, hits_result["hub"]) assert np.allclose(ret_auth, hits_result["auth"]) # bfs ctx4 = bfs(p2p_project_directed_graph, src=6) r4 = ( ctx4.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=int) ) assert np.all(r4 == bfs_result["directed"]) ctx5 = bfs(p2p_project_directed_graph, 6) r5 = ( ctx5.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=int) ) assert np.all(r5 == bfs_result["directed"]) assert np.all( ctx5.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy() == [[1, 5], [2, 5], [3, 6]] ) assert np.all( sorted(ctx5.to_numpy("r", vertex_range={"begin": 1, "end": 4})) == [5, 5, 6] ) # simple_path assert is_simple_path(p2p_project_directed_graph, [1, 10]) with pytest.raises( InvalidArgumentError, match="Louvain not support directed graph." ): louvain(p2p_project_directed_graph) # clustering ctx_clustering = clustering(p2p_project_directed_graph) ret_clustering = ( ctx_clustering.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(ret_clustering, clustering_result["directed"]) # degree_centrality ctx_dc = degree_centrality(p2p_project_directed_graph) ret_dc = ( ctx_dc.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(ret_dc, dc_result["directed"]) # eigenvector_centrality ctx_ev = eigenvector_centrality(p2p_project_directed_graph) # ret_ev = ( # ctx_ev.to_dataframe({"node": "v.id", "r": "r"}) # .sort_values(by=["node"]) # .to_numpy(dtype=float) # ) # assert np.allclose(ret_ev, ev_result["directed"]) # katz_centrality ctx_katz = katz_centrality(p2p_project_directed_graph) def test_app_on_undirected_graph( p2p_project_undirected_graph, sssp_result, pagerank_result, bfs_result, wcc_result, cdlp_result, triangles_result, kshell_result, ): # sssp ctx1 = sssp(p2p_project_undirected_graph, src=6) r1 = ( ctx1.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) r1[r1 == 1.7976931348623157e308] = float( "inf" ) # replace limit<double>::max with inf assert np.allclose(r1, sssp_result["undirected"]) assert np.allclose( ctx1.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy(), [[1.0, 31.0], [2.0, 39.0], [3.0, 78.0]], ) assert np.allclose( sorted(ctx1.to_numpy("r", vertex_range={"begin": 1, "end": 4})), [31.0, 39.0, 78.0], ) # pagerank (only work on undirected graph) ctx2 = pagerank(p2p_project_undirected_graph, delta=0.85, max_round=10) r2 = ( ctx2.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(r2, pagerank_result["undirected"]) ctx3 = pagerank(p2p_project_undirected_graph, 0.85, 10) r3 = ( ctx3.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(r3, pagerank_result["undirected"]) # r4 = pagerank(arrow_project_graph, 10, 0.85) # check max_round=10 # assert r4 is not None ctx5 = pagerank(p2p_project_undirected_graph, "0.85", "10") r5 = ( ctx5.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(r5, pagerank_result["undirected"]) ctx6 = pagerank(p2p_project_undirected_graph) 
r6 = ( ctx6.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(r6, pagerank_result["undirected"]) assert np.allclose( ctx6.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy(), [ [1.0, 6.153724343761569e-05], [2.0, 9.280361872165397e-05], [3.0, 1.643246086005906e-05], ], ) assert np.allclose( sorted(ctx6.to_numpy("r", vertex_range={"begin": 1, "end": 4})), sorted([6.153724343761569e-05, 9.280361872165397e-05, 1.643246086005906e-05]), ) # bfs ctx7 = bfs(p2p_project_undirected_graph, src=6) r7 = ( ctx7.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=int) ) assert np.all(r7 == bfs_result["undirected"]) assert np.all( ctx7.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy() == [[1, 1], [2, 2], [3, 2]] ) assert np.all( sorted(ctx7.to_numpy("r", vertex_range={"begin": 1, "end": 4})) == [1, 2, 2] ) # wcc ctx8 = wcc(p2p_project_undirected_graph) r8 = ( ctx8.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=int) ) assert np.all(r8 == wcc_result) assert np.all( ctx8.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy() == [[1, 1], [2, 1], [3, 1]] ) assert np.all(ctx8.to_numpy("r", vertex_range={"begin": 1, "end": 4}) == [1, 1, 1]) # cdlp ctx9 = cdlp(p2p_project_undirected_graph, max_round=10) r9 = ( ctx9.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=int) ) assert np.all(r9 == cdlp_result) assert np.all( ctx9.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy() == [[1, 1], [2, 2], [3, 2]] ) assert np.all( sorted(ctx9.to_numpy("r", vertex_range={"begin": 1, "end": 4})) == [1, 2, 2] ) # kshell ctx10 = k_shell(p2p_project_undirected_graph, k=3) r10 = ( ctx10.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=int) ) assert np.all(r10 == kshell_result) assert np.all( ctx10.to_dataframe( {"node": "v.id", "r": "r"}, vertex_range={"begin": 1, "end": 4} ) .sort_values(by=["node"]) .to_numpy() == [[1, 0], [2, 0], [3, 0]] ) assert np.all(ctx10.to_numpy("r", vertex_range={"begin": 1, "end": 4}) == [0, 0, 0]) # triangles ctx_triangles = triangles(p2p_project_undirected_graph) ret_triangles = ( ctx_triangles.to_dataframe({"node": "v.id", "r": "r"}) .sort_values(by=["node"]) .to_numpy(dtype=float) ) assert np.allclose(ret_triangles, triangles_result["undirected"]) # louvain ctx10 = louvain(p2p_project_undirected_graph, min_progress=50, progress_tries=2) # simple_path assert is_simple_path(p2p_project_undirected_graph, [1, 10]) def test_run_app_on_string_oid_graph(p2p_project_directed_graph_string): ctx = sssp(p2p_project_directed_graph_string, src="6") r1 = ctx.to_dataframe({"node": "v.id", "r": "r"}) assert r1[r1["node"] == "6"].r.values[0] == 0.0 @pytest.mark.skipif("FULL-TEST-SUITE" not in os.environ, reason="Run in nightly CI") def test_error_on_run_app(projected_pg_no_edge_data): # compile error: wrong type of edge data with sssp with pytest.raises(graphscope.CompilationError): sssp(projected_pg_no_edge_data, src=4)
[ "graphscope.katz_centrality", "graphscope.degree_centrality", "graphscope.wcc", "graphscope.louvain", "graphscope.cdlp", "graphscope.is_simple_path", "pytest.mark.skipif", "graphscope.hits", "graphscope.clustering", "graphscope.triangles", "graphscope.bfs", "graphscope.pagerank", "numpy.allclose", "numpy.all", "graphscope.sssp", "pytest.raises", "graphscope.eigenvector_centrality", "graphscope.k_shell", "graphscope.framework.app.AppAssets" ]
[((11920, 12008), 'pytest.mark.skipif', 'pytest.mark.skipif', (["('FULL-TEST-SUITE' not in os.environ)"], {'reason': '"""Run in nightly CI"""'}), "('FULL-TEST-SUITE' not in os.environ, reason=\n 'Run in nightly CI')\n", (11938, 12008), False, 'import pytest\n'), ((1636, 1698), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""property_sssp"""', 'context': '"""labeled_vertex_data"""'}), "(algo='property_sssp', context='labeled_vertex_data')\n", (1645, 1698), False, 'from graphscope.framework.app import AppAssets\n'), ((1751, 1796), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp"""', 'context': '"""vertex_data"""'}), "(algo='sssp', context='vertex_data')\n", (1760, 1796), False, 'from graphscope.framework.app import AppAssets\n'), ((1839, 1888), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp_has_path"""', 'context': '"""tensor"""'}), "(algo='sssp_has_path', context='tensor')\n", (1848, 1888), False, 'from graphscope.framework.app import AppAssets\n'), ((2378, 2417), 'graphscope.sssp', 'sssp', (['p2p_project_directed_graph'], {'src': '(6)'}), '(p2p_project_directed_graph, src=6)\n', (2382, 2417), False, 'from graphscope import sssp\n'), ((2648, 2688), 'numpy.allclose', 'np.allclose', (['r1', "sssp_result['directed']"], {}), "(r1, sssp_result['directed'])\n", (2659, 2688), True, 'import numpy as np\n'), ((2700, 2735), 'graphscope.sssp', 'sssp', (['p2p_project_directed_graph', '(6)'], {}), '(p2p_project_directed_graph, 6)\n', (2704, 2735), False, 'from graphscope import sssp\n'), ((2966, 3006), 'numpy.allclose', 'np.allclose', (['r2', "sssp_result['directed']"], {}), "(r2, sssp_result['directed'])\n", (2977, 3006), True, 'import numpy as np\n'), ((3409, 3452), 'graphscope.sssp', 'sssp', (['p2p_project_directed_graph', '(100000000)'], {}), '(p2p_project_directed_graph, 100000000)\n', (3413, 3452), False, 'from graphscope import sssp\n'), ((3508, 3570), 'graphscope.pagerank', 'pagerank', (['p2p_project_directed_graph'], {'delta': '(0.85)', 'max_round': '(10)'}), '(p2p_project_directed_graph, delta=0.85, max_round=10)\n', (3516, 3570), False, 'from graphscope import pagerank\n'), ((3724, 3772), 'numpy.allclose', 'np.allclose', (['ret_pr', "pagerank_result['directed']"], {}), "(ret_pr, pagerank_result['directed'])\n", (3735, 3772), True, 'import numpy as np\n'), ((3800, 3849), 'graphscope.hits', 'hits', (['p2p_project_directed_graph'], {'tolerance': '(0.001)'}), '(p2p_project_directed_graph, tolerance=0.001)\n', (3804, 3849), False, 'from graphscope import hits\n'), ((4166, 4206), 'numpy.allclose', 'np.allclose', (['ret_hub', "hits_result['hub']"], {}), "(ret_hub, hits_result['hub'])\n", (4177, 4206), True, 'import numpy as np\n'), ((4218, 4260), 'numpy.allclose', 'np.allclose', (['ret_auth', "hits_result['auth']"], {}), "(ret_auth, hits_result['auth'])\n", (4229, 4260), True, 'import numpy as np\n'), ((4283, 4321), 'graphscope.bfs', 'bfs', (['p2p_project_directed_graph'], {'src': '(6)'}), '(p2p_project_directed_graph, src=6)\n', (4286, 4321), False, 'from graphscope import bfs\n'), ((4467, 4503), 'numpy.all', 'np.all', (["(r4 == bfs_result['directed'])"], {}), "(r4 == bfs_result['directed'])\n", (4473, 4503), True, 'import numpy as np\n'), ((4515, 4549), 'graphscope.bfs', 'bfs', (['p2p_project_directed_graph', '(6)'], {}), '(p2p_project_directed_graph, 6)\n', (4518, 4549), False, 'from graphscope import bfs\n'), ((4695, 4731), 'numpy.all', 'np.all', (["(r5 == bfs_result['directed'])"], {}), "(r5 == bfs_result['directed'])\n", 
(4701, 4731), True, 'import numpy as np\n'), ((5100, 5151), 'graphscope.is_simple_path', 'is_simple_path', (['p2p_project_directed_graph', '[1, 10]'], {}), '(p2p_project_directed_graph, [1, 10])\n', (5114, 5151), False, 'from graphscope import is_simple_path\n'), ((5341, 5379), 'graphscope.clustering', 'clustering', (['p2p_project_directed_graph'], {}), '(p2p_project_directed_graph)\n', (5351, 5379), False, 'from graphscope import clustering\n'), ((5549, 5607), 'numpy.allclose', 'np.allclose', (['ret_clustering', "clustering_result['directed']"], {}), "(ret_clustering, clustering_result['directed'])\n", (5560, 5607), True, 'import numpy as np\n'), ((5646, 5691), 'graphscope.degree_centrality', 'degree_centrality', (['p2p_project_directed_graph'], {}), '(p2p_project_directed_graph)\n', (5663, 5691), False, 'from graphscope import degree_centrality\n'), ((5845, 5887), 'numpy.allclose', 'np.allclose', (['ret_dc', "dc_result['directed']"], {}), "(ret_dc, dc_result['directed'])\n", (5856, 5887), True, 'import numpy as np\n'), ((5931, 5981), 'graphscope.eigenvector_centrality', 'eigenvector_centrality', (['p2p_project_directed_graph'], {}), '(p2p_project_directed_graph)\n', (5953, 5981), False, 'from graphscope import eigenvector_centrality\n'), ((6228, 6271), 'graphscope.katz_centrality', 'katz_centrality', (['p2p_project_directed_graph'], {}), '(p2p_project_directed_graph)\n', (6243, 6271), False, 'from graphscope import katz_centrality\n'), ((6495, 6536), 'graphscope.sssp', 'sssp', (['p2p_project_undirected_graph'], {'src': '(6)'}), '(p2p_project_undirected_graph, src=6)\n', (6499, 6536), False, 'from graphscope import sssp\n'), ((6789, 6831), 'numpy.allclose', 'np.allclose', (['r1', "sssp_result['undirected']"], {}), "(r1, sssp_result['undirected'])\n", (6800, 6831), True, 'import numpy as np\n'), ((7269, 7333), 'graphscope.pagerank', 'pagerank', (['p2p_project_undirected_graph'], {'delta': '(0.85)', 'max_round': '(10)'}), '(p2p_project_undirected_graph, delta=0.85, max_round=10)\n', (7277, 7333), False, 'from graphscope import pagerank\n'), ((7481, 7527), 'numpy.allclose', 'np.allclose', (['r2', "pagerank_result['undirected']"], {}), "(r2, pagerank_result['undirected'])\n", (7492, 7527), True, 'import numpy as np\n'), ((7539, 7587), 'graphscope.pagerank', 'pagerank', (['p2p_project_undirected_graph', '(0.85)', '(10)'], {}), '(p2p_project_undirected_graph, 0.85, 10)\n', (7547, 7587), False, 'from graphscope import pagerank\n'), ((7735, 7781), 'numpy.allclose', 'np.allclose', (['r3', "pagerank_result['undirected']"], {}), "(r3, pagerank_result['undirected'])\n", (7746, 7781), True, 'import numpy as np\n'), ((7893, 7945), 'graphscope.pagerank', 'pagerank', (['p2p_project_undirected_graph', '"""0.85"""', '"""10"""'], {}), "(p2p_project_undirected_graph, '0.85', '10')\n", (7901, 7945), False, 'from graphscope import pagerank\n'), ((8093, 8139), 'numpy.allclose', 'np.allclose', (['r5', "pagerank_result['undirected']"], {}), "(r5, pagerank_result['undirected'])\n", (8104, 8139), True, 'import numpy as np\n'), ((8151, 8189), 'graphscope.pagerank', 'pagerank', (['p2p_project_undirected_graph'], {}), '(p2p_project_undirected_graph)\n', (8159, 8189), False, 'from graphscope import pagerank\n'), ((8337, 8383), 'numpy.allclose', 'np.allclose', (['r6', "pagerank_result['undirected']"], {}), "(r6, pagerank_result['undirected'])\n", (8348, 8383), True, 'import numpy as np\n'), ((8941, 8981), 'graphscope.bfs', 'bfs', (['p2p_project_undirected_graph'], {'src': '(6)'}), '(p2p_project_undirected_graph, 
src=6)\n', (8944, 8981), False, 'from graphscope import bfs\n'), ((9127, 9165), 'numpy.all', 'np.all', (["(r7 == bfs_result['undirected'])"], {}), "(r7 == bfs_result['undirected'])\n", (9133, 9165), True, 'import numpy as np\n'), ((9526, 9559), 'graphscope.wcc', 'wcc', (['p2p_project_undirected_graph'], {}), '(p2p_project_undirected_graph)\n', (9529, 9559), False, 'from graphscope import wcc\n'), ((9705, 9729), 'numpy.all', 'np.all', (['(r8 == wcc_result)'], {}), '(r8 == wcc_result)\n', (9711, 9729), True, 'import numpy as np\n'), ((10069, 10117), 'graphscope.cdlp', 'cdlp', (['p2p_project_undirected_graph'], {'max_round': '(10)'}), '(p2p_project_undirected_graph, max_round=10)\n', (10073, 10117), False, 'from graphscope import cdlp\n'), ((10263, 10288), 'numpy.all', 'np.all', (['(r9 == cdlp_result)'], {}), '(r9 == cdlp_result)\n', (10269, 10288), True, 'import numpy as np\n'), ((10653, 10695), 'graphscope.k_shell', 'k_shell', (['p2p_project_undirected_graph'], {'k': '(3)'}), '(p2p_project_undirected_graph, k=3)\n', (10660, 10695), False, 'from graphscope import k_shell\n'), ((10843, 10871), 'numpy.all', 'np.all', (['(r10 == kshell_result)'], {}), '(r10 == kshell_result)\n', (10849, 10871), True, 'import numpy as np\n'), ((11227, 11266), 'graphscope.triangles', 'triangles', (['p2p_project_undirected_graph'], {}), '(p2p_project_undirected_graph)\n', (11236, 11266), False, 'from graphscope import triangles\n'), ((11434, 11492), 'numpy.allclose', 'np.allclose', (['ret_triangles', "triangles_result['undirected']"], {}), "(ret_triangles, triangles_result['undirected'])\n", (11445, 11492), True, 'import numpy as np\n'), ((11520, 11592), 'graphscope.louvain', 'louvain', (['p2p_project_undirected_graph'], {'min_progress': '(50)', 'progress_tries': '(2)'}), '(p2p_project_undirected_graph, min_progress=50, progress_tries=2)\n', (11527, 11592), False, 'from graphscope import louvain\n'), ((11623, 11676), 'graphscope.is_simple_path', 'is_simple_path', (['p2p_project_undirected_graph', '[1, 10]'], {}), '(p2p_project_undirected_graph, [1, 10])\n', (11637, 11676), False, 'from graphscope import is_simple_path\n'), ((11762, 11810), 'graphscope.sssp', 'sssp', (['p2p_project_directed_graph_string'], {'src': '"""6"""'}), "(p2p_project_directed_graph_string, src='6')\n", (11766, 11810), False, 'from graphscope import sssp\n'), ((1974, 2079), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""Not compatible for arrow_property dynamic_property type"""'}), "(InvalidArgumentError, match=\n 'Not compatible for arrow_property dynamic_property type')\n", (1987, 2079), False, 'import pytest\n'), ((2107, 2141), 'graphscope.bfs', 'bfs', (['dynamic_property_graph'], {'src': '(4)'}), '(dynamic_property_graph, src=4)\n', (2110, 2141), False, 'from graphscope import bfs\n'), ((5162, 5247), 'pytest.raises', 'pytest.raises', (['InvalidArgumentError'], {'match': '"""Louvain not support directed graph."""'}), "(InvalidArgumentError, match='Louvain not support directed graph.'\n )\n", (5175, 5247), False, 'import pytest\n'), ((5266, 5301), 'graphscope.louvain', 'louvain', (['p2p_project_directed_graph'], {}), '(p2p_project_directed_graph)\n', (5273, 5301), False, 'from graphscope import louvain\n'), ((12122, 12164), 'pytest.raises', 'pytest.raises', (['graphscope.CompilationError'], {}), '(graphscope.CompilationError)\n', (12135, 12164), False, 'import pytest\n'), ((12174, 12212), 'graphscope.sssp', 'sssp', (['projected_pg_no_edge_data'], {'src': '(4)'}), '(projected_pg_no_edge_data, src=4)\n', (12178, 
12212), False, 'from graphscope import sssp\n')]
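The tests in this record all follow one pattern: run a built-in analytical app on a projected graph, then read results out of the returned context through selectors. A minimal sketch of that pattern (not a runnable test; `g` is a hypothetical projected graph and a running GraphScope session is assumed):

from graphscope import sssp

# `g` is assumed to be a projected simple graph comparable to
# `p2p_project_directed_graph` in the tests above (hypothetical handle).
ctx = sssp(g, src=6)

# Selectors name the output columns: "v.id" for vertex ids, "r" for the
# per-vertex result produced by the app.
df = ctx.to_dataframe({"node": "v.id", "r": "r"}).sort_values(by=["node"])

# vertex_range limits the output to a window of vertices, as the asserts above do.
part = ctx.to_numpy("r", vertex_range={"begin": 1, "end": 4})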
import pytest

from graphscope import nx
from graphscope.nx.tests.utils import replace_with_inf


@pytest.mark.usefixtures("graphscope_session")
class TestRunGenericPath:
    def setup_method(self):
        self.edges = [(0, 1), (0, 2), (1, 2), (2, 3), (1, 4)]
        G = nx.Graph()
        G.add_edges_from(self.edges, weight=1)
        DG = nx.DiGraph()
        DG.add_edges_from(self.edges, weight=1)
        self.G = G
        self.DG = DG

    def teardown_method(self):
        del self.G
        del self.edges

    def test_run_shortest_path(self):
        nx.builtin.shortest_path(self.G, source=0, weight="weight")

    def test_run_shortest_path_length(self):
        nx.builtin.single_source_dijkstra_path_length(self.G, source=0, weight="weight")

    def test_run_average_shortest_path_length(self):
        nx.builtin.average_shortest_path_length(self.G, weight="weight")

    def test_run_has_path(self):
        assert nx.builtin.has_path(self.G, source=0, target=3)

    def test_shortest_path_length_on_reverse_view(self):
        ret1 = nx.builtin.single_source_dijkstra_path_length(
            self.DG, source=2, weight="weight"
        )
        assert replace_with_inf(ret1) == {
            0.0: float("inf"),
            1.0: float("inf"),
            2.0: 0.0,
            3.0: 1.0,
            4.0: float("inf"),
        }
        RDG = self.DG.reverse(copy=False)
        ret2 = nx.builtin.single_source_dijkstra_path_length(
            RDG, source=2, weight="weight"
        )
        assert replace_with_inf(ret2) == {
            0.0: 1.0,
            1.0: 1.0,
            2.0: 0.0,
            3.0: float("inf"),
            4.0: float("inf"),
        }

    def test_shortest_path_length_on_directed_view(self):
        ret1 = nx.builtin.single_source_dijkstra_path_length(
            self.G, source=2, weight="weight"
        )
        assert ret1 == {0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: 1.0, 4.0: 2.0}
        DG = self.G.to_directed(as_view=True)
        ret2 = nx.builtin.single_source_dijkstra_path_length(
            DG, source=2, weight="weight"
        )
        assert ret2 == {0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: 1.0, 4.0: 2.0}

    def test_all_pairs_shortest_path_length(self):
        cycle = nx.cycle_graph(7)
        pl = nx.builtin.all_pairs_shortest_path_length(cycle)
        assert pl[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
        assert pl[1] == {0: 1, 1: 0, 2: 1, 3: 2, 4: 3, 5: 3, 6: 2}
        for e in cycle.edges:
            cycle.edges[e]["weight"] = 1
        cycle[1][2]["weight"] = 10
        pl = nx.builtin.all_pairs_shortest_path_length(cycle, weight="weight")
        assert pl[0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1}
        assert pl[1] == {0: 1, 1: 0, 2: 6, 3: 5, 4: 4, 5: 3, 6: 2}
[ "graphscope.nx.builtin.all_pairs_shortest_path_length", "graphscope.nx.tests.utils.replace_with_inf", "graphscope.nx.builtin.shortest_path", "graphscope.nx.builtin.has_path", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.Graph", "graphscope.nx.cycle_graph", "pytest.mark.usefixtures", "graphscope.nx.builtin.average_shortest_path_length", "graphscope.nx.DiGraph" ]
[((99, 144), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (122, 144), False, 'import pytest\n'), ((273, 283), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (281, 283), False, 'from graphscope import nx\n'), ((344, 356), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (354, 356), False, 'from graphscope import nx\n'), ((566, 625), 'graphscope.nx.builtin.shortest_path', 'nx.builtin.shortest_path', (['self.G'], {'source': '(0)', 'weight': '"""weight"""'}), "(self.G, source=0, weight='weight')\n", (590, 625), False, 'from graphscope import nx\n'), ((680, 765), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.G'], {'source': '(0)', 'weight': '"""weight"""'}), "(self.G, source=0, weight='weight'\n )\n", (725, 765), False, 'from graphscope import nx\n'), ((823, 887), 'graphscope.nx.builtin.average_shortest_path_length', 'nx.builtin.average_shortest_path_length', (['self.G'], {'weight': '"""weight"""'}), "(self.G, weight='weight')\n", (862, 887), False, 'from graphscope import nx\n'), ((937, 984), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.G'], {'source': '(0)', 'target': '(3)'}), '(self.G, source=0, target=3)\n', (956, 984), False, 'from graphscope import nx\n'), ((1058, 1144), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.DG'], {'source': '(2)', 'weight': '"""weight"""'}), "(self.DG, source=2, weight=\n 'weight')\n", (1103, 1144), False, 'from graphscope import nx\n'), ((1409, 1486), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['RDG'], {'source': '(2)', 'weight': '"""weight"""'}), "(RDG, source=2, weight='weight')\n", (1454, 1486), False, 'from graphscope import nx\n'), ((1764, 1849), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.G'], {'source': '(2)', 'weight': '"""weight"""'}), "(self.G, source=2, weight='weight'\n )\n", (1809, 1849), False, 'from graphscope import nx\n'), ((2002, 2078), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['DG'], {'source': '(2)', 'weight': '"""weight"""'}), "(DG, source=2, weight='weight')\n", (2047, 2078), False, 'from graphscope import nx\n'), ((2243, 2260), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(7)'], {}), '(7)\n', (2257, 2260), False, 'from graphscope import nx\n'), ((2274, 2322), 'graphscope.nx.builtin.all_pairs_shortest_path_length', 'nx.builtin.all_pairs_shortest_path_length', (['cycle'], {}), '(cycle)\n', (2315, 2322), False, 'from graphscope import nx\n'), ((2577, 2642), 'graphscope.nx.builtin.all_pairs_shortest_path_length', 'nx.builtin.all_pairs_shortest_path_length', (['cycle'], {'weight': '"""weight"""'}), "(cycle, weight='weight')\n", (2618, 2642), False, 'from graphscope import nx\n'), ((1177, 1199), 'graphscope.nx.tests.utils.replace_with_inf', 'replace_with_inf', (['ret1'], {}), '(ret1)\n', (1193, 1199), False, 'from graphscope.nx.tests.utils import replace_with_inf\n'), ((1524, 1546), 'graphscope.nx.tests.utils.replace_with_inf', 'replace_with_inf', (['ret2'], {}), '(ret2)\n', (1540, 1546), False, 'from graphscope.nx.tests.utils import replace_with_inf\n')]
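This record exercises the nx.builtin shortest-path routines; the same calls in a small standalone sketch, assuming a GraphScope session is already available:

from graphscope import nx

G = nx.Graph()
G.add_edges_from([(0, 1), (0, 2), (1, 2), (2, 3), (1, 4)], weight=1)

# Per-node shortest-path lengths from a single source.
lengths = nx.builtin.single_source_dijkstra_path_length(G, source=0, weight="weight")

# Graph-wide average, and a simple reachability check.
avg = nx.builtin.average_shortest_path_length(G, weight="weight")
ok = nx.builtin.has_path(G, source=0, target=3)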
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import pandas as pd import pytest import vineyard import vineyard.io from graphscope import hits from graphscope import lpa from graphscope import property_bfs from graphscope import property_sssp from graphscope import sssp from graphscope.framework.app import AppAssets def test_simple_context_to_numpy(simple_context): out = simple_context.to_numpy("v.id") assert out.shape == (40521,) out = simple_context.to_numpy("v.data") assert out.shape == (40521,) # selector of `e` is not done yet. # out = simple_context.to_numpy('e.src') # out = simple_context.to_numpy('e.dst') # out = simple_context.to_numpy('e.data') out = simple_context.to_numpy("r") assert out.shape == (40521,) def test_simple_context_to_dataframe(simple_context): out = simple_context.to_dataframe({"id": "v.id", "data": "v.data", "result": "r"}) assert out.shape == (40521, 3) def test_simple_context_to_vineyard_tensor(simple_context, p2p_project_directed_graph): out = simple_context.to_vineyard_tensor("v.id") assert out is not None out = simple_context.to_vineyard_tensor("r") assert out is not None has_path = AppAssets(algo="sssp_has_path") ctx = has_path( p2p_project_directed_graph._project_to_simple(), source=6, target=3728 ) assert ctx.to_vineyard_tensor(axis=0) is not None def test_simple_context_to_vineyard_dataframe( simple_context, p2p_project_directed_graph ): out = simple_context.to_vineyard_dataframe( {"id": "v.id", "data": "v.data", "result": "r"} ) assert out is not None def test_property_context_to_numpy(property_context): out = property_context.to_numpy("v:v0.weight") assert out.shape == (40521,) out = property_context.to_numpy("r:v1.dist_1") assert out.shape == (40786,) def test_property_context_to_dataframe(property_context): out = property_context.to_dataframe({"id": "v:v0.id", "result": "r:v0.dist_0"}) assert out.shape == (40521, 2) out = property_context.to_dataframe({"id": "v:v1.id", "result": "r:v1.dist_1"}) assert out.shape == (40786, 2) def test_property_context_output(property_context): property_context.output_to_client( fd="/tmp/r0", selector={"id": "v:v0.id", "result": "r:v0.dist_0"} ) out = pd.read_csv("/tmp/r0") assert out.shape == (40521, 2) def test_property_context_to_vineyard_tensor(property_context): out = property_context.to_vineyard_tensor("v:v0.id") assert out is not None def test_property_context_to_vineyard_dataframe(graphscope_session, property_context): out = property_context.to_vineyard_dataframe( {"id": "v:v0.id", "data": "v:v0.weight", "result": "r:v0.dist_0"} ) assert out is not None # info = graphscope_session.info # conf = info["engine_config"] # vineyard_endpoint = conf["vineyard_rpc_endpoint"] # vineyard_ipc_socket = conf["vineyard_socket"] # print("run: vineyard_read_vineyard_dataframe {} {} 1 0".format(vineyard_ipc_socket, 'vineyard://'+str(out))) # comment out this part since the user have to custom ssh.sh and remove a 
line in the 01-stream.py:56 # FIXME: DFToFile has problem again, caused by stale vineyard process # dfstream = vineyard.io.open( # "vineyard://" + str(out), # vineyard_ipc_socket=vineyard_ipc_socket, # vineyard_endpoint=vineyard_endpoint, # ) # vineyard.io.open( # "file:///tmp/test_property_context_to_vineyard_dataframe", # dfstream, # mode="w", # vineyard_ipc_socket=vineyard_ipc_socket, # vineyard_endpoint=vineyard_endpoint, # ) # assert out is not None def test_add_column(arrow_property_graph, property_context): g2 = arrow_property_graph.add_column( property_context, {"result_0": "r:v0.dist_0", "result_1": "r:v1.dist_1"} ) assert "result_0" in [p.name for p in g2.schema.get_vertex_properties("v0")] assert "result_1" in [p.name for p in g2.schema.get_vertex_properties("v1")] def test_add_column_after_computation(arrow_property_graph): sg = arrow_property_graph.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]}) ret = sssp(sg, 20) g2 = arrow_property_graph.add_column( ret, {"id_col": "v.id", "data_col": "v.data", "result_col": "r"} ) assert "id_col" in [p.name for p in g2.schema.get_vertex_properties("v0")] assert "data_col" in [p.name for p in g2.schema.get_vertex_properties("v0")] assert "result_col" in [p.name for p in g2.schema.get_vertex_properties("v0")] def test_lpa(arrow_property_graph_lpa): ret = ( lpa(arrow_property_graph_lpa, max_round=20) .to_dataframe( {"node": "v:v0.id", "label0": "r:v0.label_0", "label1": "r:v0.label_1"} ) .sort_values(by=["node"]) ) def test_error_on_selector(property_context): with pytest.raises(KeyError, match="non_exist_label"): out = property_context.to_numpy("v:non_exist_label.id") with pytest.raises(KeyError, match="non_exist_prop"): out = property_context.to_numpy("v:v0.non_exist_prop") with pytest.raises(RuntimeError, match="selector cannot be None"): out = property_context.to_numpy(selector=None) with pytest.raises(ValueError, match="not enough values to unpack"): out = property_context.to_numpy("xxx") with pytest.raises(SyntaxError, match="Invalid selector"): out = property_context.to_numpy("xxx:a.b")
[ "pytest.raises", "graphscope.lpa", "graphscope.framework.app.AppAssets", "pandas.read_csv", "graphscope.sssp" ]
[((1841, 1872), 'graphscope.framework.app.AppAssets', 'AppAssets', ([], {'algo': '"""sssp_has_path"""'}), "(algo='sssp_has_path')\n", (1850, 1872), False, 'from graphscope.framework.app import AppAssets\n'), ((2973, 2995), 'pandas.read_csv', 'pd.read_csv', (['"""/tmp/r0"""'], {}), "('/tmp/r0')\n", (2984, 2995), True, 'import pandas as pd\n'), ((4864, 4876), 'graphscope.sssp', 'sssp', (['sg', '(20)'], {}), '(sg, 20)\n', (4868, 4876), False, 'from graphscope import sssp\n'), ((5561, 5609), 'pytest.raises', 'pytest.raises', (['KeyError'], {'match': '"""non_exist_label"""'}), "(KeyError, match='non_exist_label')\n", (5574, 5609), False, 'import pytest\n'), ((5684, 5731), 'pytest.raises', 'pytest.raises', (['KeyError'], {'match': '"""non_exist_prop"""'}), "(KeyError, match='non_exist_prop')\n", (5697, 5731), False, 'import pytest\n'), ((5805, 5865), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""selector cannot be None"""'}), "(RuntimeError, match='selector cannot be None')\n", (5818, 5865), False, 'import pytest\n'), ((5931, 5993), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""not enough values to unpack"""'}), "(ValueError, match='not enough values to unpack')\n", (5944, 5993), False, 'import pytest\n'), ((6051, 6103), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {'match': '"""Invalid selector"""'}), "(SyntaxError, match='Invalid selector')\n", (6064, 6103), False, 'import pytest\n'), ((5303, 5346), 'graphscope.lpa', 'lpa', (['arrow_property_graph_lpa'], {'max_round': '(20)'}), '(arrow_property_graph_lpa, max_round=20)\n', (5306, 5346), False, 'from graphscope import lpa\n')]
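The context tests above lean on selector strings and on add_column to write results back into a property graph; a condensed sketch of that flow, with `arrow_property_graph` standing in for an already-loaded property graph:

from graphscope import sssp

# Project the property graph to a simple graph and run an app on it ...
sg = arrow_property_graph.project(vertices={"v0": ["id"]}, edges={"e0": ["weight"]})
ctx = sssp(sg, 20)

# ... then attach the per-vertex results as new columns on the original graph.
g2 = arrow_property_graph.add_column(
    ctx, {"id_col": "v.id", "data_col": "v.data", "result_col": "r"}
)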
import networkx.algorithms.tests.test_voronoi

import pytest

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(
    networkx.algorithms.tests.test_voronoi,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)

from networkx.algorithms.tests.test_voronoi import TestVoronoiCells


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestVoronoiCells)
class TestVoronoiCells:
    @pytest.mark.skip(reason="not support multigraph")
    def test_multigraph_unweighted(self):
        pass

    @pytest.mark.skip(reason="not support multigraph")
    def test_multidigraph_unweighted(self):
        pass

    @pytest.mark.skip(reason="not support multigraph")
    def test_multigraph_weighted(self):
        pass

    @pytest.mark.skip(reason="not support multigraph")
    def test_multidigraph_weighted(self):
        pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((409, 454), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (432, 454), False, 'import pytest\n'), ((456, 500), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestVoronoiCells'], {}), '(TestVoronoiCells)\n', (482, 500), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((530, 579), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (546, 579), False, 'import pytest\n'), ((641, 690), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (657, 690), False, 'import pytest\n'), ((754, 803), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (770, 803), False, 'import pytest\n'), ((863, 912), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (879, 912), False, 'import pytest\n'), ((290, 335), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (313, 335), False, 'import pytest\n')]
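This record and several that follow reuse upstream NetworkX test classes through the compat helpers; the pattern, condensed here with stand-in names (`upstream_test_module` and `UpstreamTestCase` are placeholders, not real symbols):

import pytest

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

# Re-import an upstream networkx test module so its contents target
# graphscope.nx, attaching the session fixture to every collected test.
import_as_graphscope_nx(
    upstream_test_module,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)

# Keep the upstream test logic, but skip cases the backend cannot run yet.
@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(UpstreamTestCase)
class UpstreamTestCase:
    @pytest.mark.skip(reason="not support multigraph")
    def test_multigraph_case(self):
        pass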
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import pytest from networkx.tests.test_convert_scipy import TestConvertNumpy import graphscope.nx as nx from graphscope.nx.generators.classic import barbell_graph from graphscope.nx.generators.classic import cycle_graph from graphscope.nx.generators.classic import path_graph from graphscope.nx.tests.utils import assert_graphs_equal from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestConvertNumpy) class TestConvertNumpy: @pytest.mark.skip(reason="graphscope.nx not support numpy dtype yet") def test_identity_graph_matrix(self): "Conversion from graph to sparse matrix to graph." A = nx.to_scipy_sparse_matrix(self.G1) self.identity_conversion(self.G1, A, nx.Graph()) @pytest.mark.skip(reason="graphscope.nx not support numpy dtype yet") def test_identity_digraph_matrix(self): "Conversion from digraph to sparse matrix to digraph." A = nx.to_scipy_sparse_matrix(self.G2) self.identity_conversion(self.G2, A, nx.DiGraph()) @pytest.mark.skip(reason="graphscope.nx not support numpy dtype yet") def test_identity_weighted_graph_matrix(self): """Conversion from weighted graph to sparse matrix to weighted graph.""" A = nx.to_scipy_sparse_matrix(self.G3) self.identity_conversion(self.G3, A, nx.Graph()) @pytest.mark.skip(reason="graphscope.nx not support numpy dtype yet") def test_identity_weighted_digraph_matrix(self): """Conversion from weighted digraph to sparse matrix to weighted digraph.""" A = nx.to_scipy_sparse_matrix(self.G4) self.identity_conversion(self.G4, A, nx.DiGraph()) @pytest.mark.skip(reason="graphscope.nx not support numpy dtype yet") def test_nodelist(self): """Conversion from graph to sparse matrix to graph with nodelist.""" P4 = path_graph(4) P3 = path_graph(3) nodelist = list(P3.nodes()) A = nx.to_scipy_sparse_matrix(P4, nodelist=nodelist) GA = nx.Graph(A) self.assert_isomorphic(GA, P3) # Make nodelist ambiguous by containing duplicates. nodelist += [nodelist[0]] pytest.raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist) @pytest.mark.skip(reason="graphscope.nx not support numpy dtype yet") def test_from_scipy_sparse_matrix_parallel_edges(self): """Tests that the :func:`networkx.from_scipy_sparse_matrix` function interprets integer weights as the number of parallel edges when creating a multigraph. """ A = sparse.csr_matrix([[1, 1], [1, 2]]) # First, with a simple graph, each integer entry in the adjacency # matrix is interpreted as the weight of a single edge in the graph. expected = nx.DiGraph() edges = [(0, 0), (0, 1), (1, 0)] expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) expected.add_edge(1, 1, weight=2) actual = nx.from_scipy_sparse_matrix( A, parallel_edges=True, create_using=nx.DiGraph ) assert_graphs_equal(actual, expected) actual = nx.from_scipy_sparse_matrix( A, parallel_edges=False, create_using=nx.DiGraph ) assert_graphs_equal(actual, expected) # Now each integer entry in the adjacency matrix is interpreted as the # number of parallel edges in the graph if the appropriate keyword # argument is specified. 
edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)] expected = nx.MultiDiGraph() expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) actual = nx.from_scipy_sparse_matrix( A, parallel_edges=True, create_using=nx.MultiDiGraph ) assert_graphs_equal(actual, expected) expected = nx.MultiDiGraph() expected.add_edges_from(set(edges), weight=1) # The sole self-loop (edge 0) on vertex 1 should have weight 2. expected[1][1][0]["weight"] = 2 actual = nx.from_scipy_sparse_matrix( A, parallel_edges=False, create_using=nx.MultiDiGraph ) assert_graphs_equal(actual, expected)
[ "graphscope.nx.tests.utils.assert_graphs_equal", "graphscope.nx.DiGraph", "graphscope.nx.to_scipy_sparse_matrix", "graphscope.nx.utils.compat.with_graphscope_nx_context", "graphscope.nx.generators.classic.path_graph", "graphscope.nx.Graph", "graphscope.nx.from_scipy_sparse_matrix", "pytest.raises", "graphscope.nx.MultiDiGraph", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((804, 849), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (827, 849), False, 'import pytest\n'), ((851, 895), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestConvertNumpy'], {}), '(TestConvertNumpy)\n', (877, 895), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((925, 993), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx not support numpy dtype yet"""'}), "(reason='graphscope.nx not support numpy dtype yet')\n", (941, 993), False, 'import pytest\n'), ((1205, 1273), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx not support numpy dtype yet"""'}), "(reason='graphscope.nx not support numpy dtype yet')\n", (1221, 1273), False, 'import pytest\n'), ((1493, 1561), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx not support numpy dtype yet"""'}), "(reason='graphscope.nx not support numpy dtype yet')\n", (1509, 1561), False, 'import pytest\n'), ((1804, 1872), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx not support numpy dtype yet"""'}), "(reason='graphscope.nx not support numpy dtype yet')\n", (1820, 1872), False, 'import pytest\n'), ((2123, 2191), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx not support numpy dtype yet"""'}), "(reason='graphscope.nx not support numpy dtype yet')\n", (2139, 2191), False, 'import pytest\n'), ((2697, 2765), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""graphscope.nx not support numpy dtype yet"""'}), "(reason='graphscope.nx not support numpy dtype yet')\n", (2713, 2765), False, 'import pytest\n'), ((1107, 1141), 'graphscope.nx.to_scipy_sparse_matrix', 'nx.to_scipy_sparse_matrix', (['self.G1'], {}), '(self.G1)\n', (1132, 1141), True, 'import graphscope.nx as nx\n'), ((1393, 1427), 'graphscope.nx.to_scipy_sparse_matrix', 'nx.to_scipy_sparse_matrix', (['self.G2'], {}), '(self.G2)\n', (1418, 1427), True, 'import graphscope.nx as nx\n'), ((1706, 1740), 'graphscope.nx.to_scipy_sparse_matrix', 'nx.to_scipy_sparse_matrix', (['self.G3'], {}), '(self.G3)\n', (1731, 1740), True, 'import graphscope.nx as nx\n'), ((2023, 2057), 'graphscope.nx.to_scipy_sparse_matrix', 'nx.to_scipy_sparse_matrix', (['self.G4'], {}), '(self.G4)\n', (2048, 2057), True, 'import graphscope.nx as nx\n'), ((2311, 2324), 'graphscope.nx.generators.classic.path_graph', 'path_graph', (['(4)'], {}), '(4)\n', (2321, 2324), False, 'from graphscope.nx.generators.classic import path_graph\n'), ((2338, 2351), 'graphscope.nx.generators.classic.path_graph', 'path_graph', (['(3)'], {}), '(3)\n', (2348, 2351), False, 'from graphscope.nx.generators.classic import path_graph\n'), ((2400, 2448), 'graphscope.nx.to_scipy_sparse_matrix', 'nx.to_scipy_sparse_matrix', (['P4'], {'nodelist': 'nodelist'}), '(P4, nodelist=nodelist)\n', (2425, 2448), True, 'import graphscope.nx as nx\n'), ((2462, 2473), 'graphscope.nx.Graph', 'nx.Graph', (['A'], {}), '(A)\n', (2470, 2473), True, 'import graphscope.nx as nx\n'), ((2616, 2690), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.to_numpy_matrix', 'P3'], {'nodelist': 'nodelist'}), '(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist)\n', (2629, 2690), False, 'import pytest\n'), ((3237, 3249), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (3247, 3249), True, 'import graphscope.nx as nx\n'), ((3424, 3500), 'graphscope.nx.from_scipy_sparse_matrix', 
'nx.from_scipy_sparse_matrix', (['A'], {'parallel_edges': '(True)', 'create_using': 'nx.DiGraph'}), '(A, parallel_edges=True, create_using=nx.DiGraph)\n', (3451, 3500), True, 'import graphscope.nx as nx\n'), ((3531, 3568), 'graphscope.nx.tests.utils.assert_graphs_equal', 'assert_graphs_equal', (['actual', 'expected'], {}), '(actual, expected)\n', (3550, 3568), False, 'from graphscope.nx.tests.utils import assert_graphs_equal\n'), ((3586, 3663), 'graphscope.nx.from_scipy_sparse_matrix', 'nx.from_scipy_sparse_matrix', (['A'], {'parallel_edges': '(False)', 'create_using': 'nx.DiGraph'}), '(A, parallel_edges=False, create_using=nx.DiGraph)\n', (3613, 3663), True, 'import graphscope.nx as nx\n'), ((3694, 3731), 'graphscope.nx.tests.utils.assert_graphs_equal', 'assert_graphs_equal', (['actual', 'expected'], {}), '(actual, expected)\n', (3713, 3731), False, 'from graphscope.nx.tests.utils import assert_graphs_equal\n'), ((3995, 4012), 'graphscope.nx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (4010, 4012), True, 'import graphscope.nx as nx\n'), ((4104, 4190), 'graphscope.nx.from_scipy_sparse_matrix', 'nx.from_scipy_sparse_matrix', (['A'], {'parallel_edges': '(True)', 'create_using': 'nx.MultiDiGraph'}), '(A, parallel_edges=True, create_using=nx.\n MultiDiGraph)\n', (4131, 4190), True, 'import graphscope.nx as nx\n'), ((4216, 4253), 'graphscope.nx.tests.utils.assert_graphs_equal', 'assert_graphs_equal', (['actual', 'expected'], {}), '(actual, expected)\n', (4235, 4253), False, 'from graphscope.nx.tests.utils import assert_graphs_equal\n'), ((4273, 4290), 'graphscope.nx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (4288, 4290), True, 'import graphscope.nx as nx\n'), ((4474, 4561), 'graphscope.nx.from_scipy_sparse_matrix', 'nx.from_scipy_sparse_matrix', (['A'], {'parallel_edges': '(False)', 'create_using': 'nx.MultiDiGraph'}), '(A, parallel_edges=False, create_using=nx.\n MultiDiGraph)\n', (4501, 4561), True, 'import graphscope.nx as nx\n'), ((4587, 4624), 'graphscope.nx.tests.utils.assert_graphs_equal', 'assert_graphs_equal', (['actual', 'expected'], {}), '(actual, expected)\n', (4606, 4624), False, 'from graphscope.nx.tests.utils import assert_graphs_equal\n'), ((1187, 1197), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1195, 1197), True, 'import graphscope.nx as nx\n'), ((1473, 1485), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (1483, 1485), True, 'import graphscope.nx as nx\n'), ((1786, 1796), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (1794, 1796), True, 'import graphscope.nx as nx\n'), ((2103, 2115), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2113, 2115), True, 'import graphscope.nx as nx\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import json import logging import pathlib from typing import Dict from typing import Sequence from typing import Tuple from urllib.parse import urlparse import numpy as np import pandas as pd import pyarrow as pa from graphscope.framework import utils from graphscope.framework.errors import check_argument from graphscope.proto import attr_value_pb2 from graphscope.proto import types_pb2 try: import vineyard except ImportError: vineyard = None logger = logging.getLogger("graphscope") class CSVOptions(object): """Options to read from CSV files. Avaiable options are: - column delimiters - include a subset of columns - types of each columns - whether the file contains a header """ def __init__(self) -> None: # Field delimiter self.delimiter = "," # If non-empty, indicates the names of columns from the CSV file that should # be actually read and converted (in the list's order). # Columns not in this list will be ignored. self.include_columns = [] # Optional per-column types (disabling type inference on those columns) self.column_types = [] # include_columns always contains id column for v, src id and dst id column for e # if it contains and only contains those id columns, we suppose user actually want to # read all other properties. (Otherwise they should specify at least one property) self.force_include_all = False # If true, column names will be read from the first CSV row # If false, column names will be of the form "f0", "f1"... self.header_row = True def to_dict(self) -> Dict: options = {} options["delimiter"] = self.delimiter options["header_row"] = self.header_row if self.include_columns: options["schema"] = ",".join(self.include_columns) if self.column_types: cpp_types = [utils.data_type_to_cpp(dt) for dt in self.column_types] options["column_types"] = ",".join(cpp_types) if self.force_include_all: options["include_all_columns"] = self.force_include_all return options def __str__(self) -> str: return "&".join(["{}={}".format(k, v) for k, v in self.to_dict().items()]) def __repr__(self) -> str: return self.__str__() class Loader(object): """Generic data source wrapper. Loader can take various data sources, and assemble necessary information into a AttrValue. """ def __init__(self, source, delimiter=",", header_row=True, **kwargs): """Initialize a loader with configurable options. Note: Loader cannot be reused since it may change inner state when constructing information for loading a graph. 
Args: source (str or value): The data source to be load, which could be one of the followings: * local file: specified by URL :code:`file://...` * oss file: specified by URL :code:`oss://...` * hdfs file: specified by URL :code:`hdfs://...` * s3 file: specified by URL :code:`s3://...` * numpy ndarray, in CSR format * pandas dataframe Ordinary data sources can be loaded using vineyard stream as well, a :code:`vineyard://` prefix can be used in the URL then the local file, oss object or HDFS file will be loaded into a vineyard stream first, then GraphScope's fragment will be built upon those streams in vineyard. Once the stream IO in vineyard reaches a stable state, it will be the default mode to load data sources and construct fragments in GraphScope. delimiter (char, optional): Column delimiter. Defaults to ',' header_row (bool, optional): Whether source have a header. If true, column names will be read from the first row of source, else they are named by 'f0', 'f1', .... Defaults to True. Notes: Data is resolved by drivers in `libvineyard <https://github.com/alibaba/libvineyard>`_ . See more additional info in `Loading Graph` section of Docs, and implementations in `libvineyard`. """ self.protocol = "" # For numpy or pandas, source is the serialized raw bytes # For files, it's the location # For vineyard, it's the ID or name self.source = "" # options for data source is csv self.options = CSVOptions() check_argument( isinstance(delimiter, str) and len(delimiter) == 1, "The delimiter must be a single charactor, cannot be '%s'" % delimiter, ) self.options.delimiter = delimiter self.options.header_row = header_row # metas for data source is numpy or dataframe self.deduced_properties = None # extra args directly passed to storage system # find more details in fsspec # https://filesystem-spec.readthedocs.io/en/latest/ self.storage_options = kwargs # also parse protocol and source in `resolve` method self.resolve(source) def __str__(self) -> str: return "{}: {}".format(self.protocol, self.source) def __repr__(self) -> str: return self.__str__() def resolve(self, source): """Dispatch resolver based on type of souce. Args: source: Different data sources Raises: RuntimeError: If the source is a not supported type. """ if isinstance(source, str): self.process_location(source) elif isinstance(source, pathlib.Path): self.process_location(str(source)) elif isinstance(source, pd.DataFrame): self.process_pandas(source) elif vineyard is not None and isinstance( source, (vineyard.Object, vineyard.ObjectID, vineyard.ObjectName) ): self.process_vy_object(source) elif isinstance(source, Sequence): # Assume a list of numpy array are passed as COO matrix, with length >= 2. # Formats: [src_id, dst_id, prop_1, ..., prop_n] check_argument(all([isinstance(item, np.ndarray) for item in source])) self.process_numpy(source) else: raise RuntimeError("Not support source", source) def process_location(self, source): self.protocol = urlparse(source).scheme # If protocol is not set, use 'file' as default if not self.protocol: self.protocol = "file" self.source = source def process_numpy(self, source: Sequence[np.ndarray]): """Transform arrays to equivalent DataFrame, note the transpose is necessary. 
""" col_names = ["f%s" % i for i in range(len(source))] df = pd.DataFrame(source, col_names).T types = {} for i, _ in enumerate(source): types[col_names[i]] = source[i].dtype df = df.astype(types) return self.process_pandas(df) def process_pandas(self, source: pd.DataFrame): self.protocol = "pandas" col_names = list(source.columns.values) col_types = [utils._from_numpy_dtype(dtype) for dtype in source.dtypes.values] table = pa.Table.from_pandas(source, preserve_index=False) sink = pa.BufferOutputStream() with pa.ipc.new_stream(sink, table.schema) as writer: writer.write_table(table) buf = sink.getvalue() self.deduced_properties = list(zip(col_names, col_types)) self.source = bytes(memoryview(buf)) def process_vy_object(self, source): self.protocol = "vineyard" # encoding: add a `o` prefix to object id, and a `s` prefix to object name. if isinstance(source, vineyard.Object): self.source = "o%s" % repr(source.id) elif isinstance(source, vineyard.ObjectID): self.source = "o%s" % repr(source) elif isinstance(source, vineyard.ObjectName): self.source = "s%s" % str(source) else: raise ValueError( "Invalid input source: not a vineyard's Object, ObjectID or ObjectName" ) def select_columns(self, columns: Sequence[Tuple[str, int]], include_all=False): self.options.include_columns = [] self.options.column_types = [] for name, data_type in columns: self.options.include_columns.append(name) self.options.column_types.append(data_type) self.options.force_include_all = include_all def get_attr(self): attr = attr_value_pb2.AttrValue() attr.func.name = "loader" attr.func.attr[types_pb2.PROTOCOL].CopyFrom(utils.s_to_attr(self.protocol)) # Let graphscope handle local files cause it's implemented in c++ and # doesn't add an additional stream layer. # Maybe handled by vineyard in the near future if self.protocol == "file": source = "{}#{}".format(self.source, self.options) attr.func.attr[types_pb2.VALUES].CopyFrom( utils.bytes_to_attr(source.encode("utf-8")) ) elif self.protocol == "pandas": attr.func.attr[types_pb2.VALUES].CopyFrom(utils.bytes_to_attr(self.source)) else: # Let vineyard handle other data source. attr.func.attr[types_pb2.VALUES].CopyFrom( utils.bytes_to_attr(self.source.encode("utf-8")) ) if self.protocol != "vineyard": # need spawn an io stream in coordinator attr.func.attr[types_pb2.STORAGE_OPTIONS].CopyFrom( utils.s_to_attr(json.dumps(self.storage_options)) ) attr.func.attr[types_pb2.READ_OPTIONS].CopyFrom( utils.s_to_attr(json.dumps(self.options.to_dict())) ) return attr
[ "pandas.DataFrame", "graphscope.framework.utils._from_numpy_dtype", "graphscope.framework.utils.data_type_to_cpp", "graphscope.framework.utils.s_to_attr", "logging.getLogger", "pyarrow.ipc.new_stream", "pyarrow.Table.from_pandas", "graphscope.framework.utils.bytes_to_attr", "json.dumps", "pyarrow.BufferOutputStream", "graphscope.proto.attr_value_pb2.AttrValue", "urllib.parse.urlparse" ]
[((1135, 1166), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1152, 1166), False, 'import logging\n'), ((8094, 8144), 'pyarrow.Table.from_pandas', 'pa.Table.from_pandas', (['source'], {'preserve_index': '(False)'}), '(source, preserve_index=False)\n', (8114, 8144), True, 'import pyarrow as pa\n'), ((8160, 8183), 'pyarrow.BufferOutputStream', 'pa.BufferOutputStream', ([], {}), '()\n', (8181, 8183), True, 'import pyarrow as pa\n'), ((9440, 9466), 'graphscope.proto.attr_value_pb2.AttrValue', 'attr_value_pb2.AttrValue', ([], {}), '()\n', (9464, 9466), False, 'from graphscope.proto import attr_value_pb2\n'), ((7232, 7248), 'urllib.parse.urlparse', 'urlparse', (['source'], {}), '(source)\n', (7240, 7248), False, 'from urllib.parse import urlparse\n'), ((7645, 7676), 'pandas.DataFrame', 'pd.DataFrame', (['source', 'col_names'], {}), '(source, col_names)\n', (7657, 7676), True, 'import pandas as pd\n'), ((8011, 8041), 'graphscope.framework.utils._from_numpy_dtype', 'utils._from_numpy_dtype', (['dtype'], {}), '(dtype)\n', (8034, 8041), False, 'from graphscope.framework import utils\n'), ((8197, 8234), 'pyarrow.ipc.new_stream', 'pa.ipc.new_stream', (['sink', 'table.schema'], {}), '(sink, table.schema)\n', (8214, 8234), True, 'import pyarrow as pa\n'), ((9553, 9583), 'graphscope.framework.utils.s_to_attr', 'utils.s_to_attr', (['self.protocol'], {}), '(self.protocol)\n', (9568, 9583), False, 'from graphscope.framework import utils\n'), ((2625, 2651), 'graphscope.framework.utils.data_type_to_cpp', 'utils.data_type_to_cpp', (['dt'], {}), '(dt)\n', (2647, 2651), False, 'from graphscope.framework import utils\n'), ((10090, 10122), 'graphscope.framework.utils.bytes_to_attr', 'utils.bytes_to_attr', (['self.source'], {}), '(self.source)\n', (10109, 10122), False, 'from graphscope.framework import utils\n'), ((10519, 10551), 'json.dumps', 'json.dumps', (['self.storage_options'], {}), '(self.storage_options)\n', (10529, 10551), False, 'import json\n')]
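The Loader docstring above enumerates the accepted data sources; a short construction sketch (the CSV path and column values are made up, and the module path graphscope.framework.loader is assumed):

import pandas as pd

from graphscope.framework.loader import Loader  # assumed module path

# A local CSV file; delimiter and header_row are forwarded to CSVOptions.
edge_source = Loader("file:///tmp/p2p_edges.csv", delimiter=",", header_row=True)

# An in-memory DataFrame; column names and types are deduced, and the frame
# is serialized through Arrow into the loader's source bytes.
df = pd.DataFrame({"src": [0, 1], "dst": [1, 2], "weight": [0.5, 1.5]})
vertex_source = Loader(df)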
import networkx.algorithms.tests.test_euler

import pytest

from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(
    networkx.algorithms.tests.test_euler,
    decorators=pytest.mark.usefixtures("graphscope_session"),
)

from networkx.algorithms.tests.test_euler import TestEulerianCircuit
from networkx.algorithms.tests.test_euler import TestEulerize
from networkx.algorithms.tests.test_euler import TestIsEulerian


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestIsEulerian)
class TestIsEulerian:
    def test_is_eulerian2(self):
        # not connected
        G = nx.Graph()
        G.add_nodes_from([1, 2, 3])
        assert not nx.is_eulerian(G)

        # not strongly connected
        G = nx.DiGraph()
        G.add_nodes_from([1, 2, 3])
        assert not nx.is_eulerian(G)


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestEulerianCircuit)
class TestEulerianCircuit:
    @pytest.mark.skip(reason="not support multigraph")
    def test_multigraph(self):
        pass

    @pytest.mark.skip(reason="not support multigraph")
    def test_multigraph_with_keys(self):
        pass


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestEulerize)
class TestEulerize:
    def test_on_complete_graph(self):
        G = nx.complete_graph(4)
        assert nx.is_eulerian(nx.eulerize(G))

    @pytest.mark.skip(reason="not support multigraph")
    def test_null_multigraph(self):
        pass

    @pytest.mark.skip(reason="not support multigraph")
    def test_on_eulerian_multigraph(self):
        pass
[ "graphscope.experimental.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((558, 603), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (581, 603), False, 'import pytest\n'), ((605, 647), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestIsEulerian'], {}), '(TestIsEulerian)\n', (631, 647), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((957, 1002), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (980, 1002), False, 'import pytest\n'), ((1004, 1051), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestEulerianCircuit'], {}), '(TestEulerianCircuit)\n', (1030, 1051), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1291, 1336), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (1314, 1336), False, 'import pytest\n'), ((1338, 1378), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestEulerize'], {}), '(TestEulerize)\n', (1364, 1378), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((1084, 1133), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1100, 1133), False, 'import pytest\n'), ((1184, 1233), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1200, 1233), False, 'import pytest\n'), ((1522, 1571), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1538, 1571), False, 'import pytest\n'), ((1627, 1676), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1643, 1676), False, 'import pytest\n'), ((312, 357), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (335, 357), False, 'import pytest\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file edgelist.py is referred and derived from project NetworkX, # # https://github.com/networkx/networkx/blob/master/networkx/readwrite/edgelist.py # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import networkx.readwrite.edgelist from networkx.readwrite.edgelist import parse_edgelist as _parse_edgelist from networkx.readwrite.edgelist import read_edgelist as _read_edgelist from networkx.utils.decorators import open_file from graphscope import nx from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import patch_docstring import_as_graphscope_nx(networkx.readwrite.edgelist) @patch_docstring(_parse_edgelist) def parse_edgelist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True ): from ast import literal_eval G = nx.empty_graph(0, create_using) edges = [] for line in lines: p = line.find(comments) if p >= 0: line = line[:p] if not len(line): continue # split line, should have 2 or more s = line.strip().split(delimiter) if len(s) < 2: continue u = s.pop(0) v = s.pop(0) d = s if nodetype is not None: try: u = nodetype(u) v = nodetype(v) except Exception as e: raise TypeError( "Failed to convert nodes %s,%s to type %s." % (u, v, nodetype) ) from e if len(d) == 0 or data is False: # no data or data type specified edgedata = {} elif data is True: # no edge types specified try: # try to evaluate as dictionary edgedata = dict(literal_eval(" ".join(d))) except Exception as e: raise TypeError( "Failed to convert edge data (%s) to dictionary." % (d) ) from e else: # convert edge data to dictionary with specified keys and type if len(d) != len(data): raise IndexError( "Edge data %s and data_keys %s are not the same length" % (d, data) ) edgedata = {} for (edge_key, edge_type), edge_value in zip(data, d): try: edge_value = edge_type(edge_value) except Exception as e: raise TypeError( "Failed to convert %s data %s to type %s." % (edge_key, edge_value, edge_type) ) from e edgedata.update({edge_key: edge_value}) edges.append((u, v, edgedata)) G.add_edges_from(edges) return G @open_file(0, mode="rb") def read_edgelist( path, comments="#", delimiter=None, create_using=None, nodetype=None, data=True, edgetype=None, encoding="utf-8", ): """Read a graph from a list of edges. Parameters ---------- path : file or string File or filename to read. If a file is provided, it must be opened in 'rb' mode. Filenames ending in .gz or .bz2 will be uncompressed. comments : string, optional The character used to indicate the start of a comment. delimiter : string, optional The string used to separate values. The default is whitespace. create_using : NetworkX graph constructor, optional (default=nx.Graph) Graph type to create. If graph instance, then cleared before populated. nodetype : int, float, str, tuple, bool Python object, optional Convert node data from strings to specified type data : bool or list of (label,type) tuples Tuples specifying dictionary key names and types for edge data edgetype : int, float, str, tuple, bool Python object, optional OBSOLETE Convert edge data from strings to specified type and use as 'weight' encoding: string, optional Specify which encoding to use when reading file. 
Returns ------- G : graph A networkx Graph or other type specified with create_using See Also -------- read_adjlist Notes ----- Since nodes must be hashable, the function nodetype must return hashable types (e.g. int, float, str, frozenset - or tuples of those, etc.) """ lines = (line.decode(encoding) for line in path) return parse_edgelist( lines, comments=comments, delimiter=delimiter, create_using=create_using, nodetype=nodetype, data=data, )
[ "networkx.utils.decorators.open_file", "graphscope.nx.empty_graph", "graphscope.nx.utils.compat.patch_docstring", "graphscope.nx.utils.compat.import_as_graphscope_nx" ]
[((873, 925), 'graphscope.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.readwrite.edgelist'], {}), '(networkx.readwrite.edgelist)\n', (896, 925), False, 'from graphscope.nx.utils.compat import import_as_graphscope_nx\n'), ((929, 961), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_parse_edgelist'], {}), '(_parse_edgelist)\n', (944, 961), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((3045, 3068), 'networkx.utils.decorators.open_file', 'open_file', (['(0)'], {'mode': '"""rb"""'}), "(0, mode='rb')\n", (3054, 3068), False, 'from networkx.utils.decorators import open_file\n'), ((1112, 1143), 'graphscope.nx.empty_graph', 'nx.empty_graph', (['(0)', 'create_using'], {}), '(0, create_using)\n', (1126, 1143), False, 'from graphscope import nx\n')]
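parse_edgelist above accepts edge data either as Python dict literals or as explicitly typed columns; a small usage sketch (the import path graphscope.nx.readwrite.edgelist is assumed):

from graphscope.nx.readwrite.edgelist import parse_edgelist  # assumed path

# Edge data written as dict literals, enabled by data=True (the default).
lines = ["1 2 {'weight': 3.0}", "2 3 {'weight': 27.0}", "3 4 {'weight': 3.0}"]
G = parse_edgelist(lines, nodetype=int)

# The same edges with typed columns declared explicitly.
lines = ["1 2 3.0", "2 3 27.0", "3 4 3.0"]
H = parse_edgelist(lines, nodetype=int, data=(("weight", float),))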
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file adjlist.py is referred and derived from project NetworkX, # # https://github.com/networkx/networkx/blob/master/networkx/readwrite/adjlist.py # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import networkx.readwrite.adjlist from networkx.readwrite.adjlist import parse_adjlist as _parse_adjlist from networkx.utils.decorators import open_file from graphscope import nx from graphscope.nx.utils.compat import import_as_graphscope_nx from graphscope.nx.utils.compat import patch_docstring import_as_graphscope_nx(networkx.readwrite.adjlist) @patch_docstring(_parse_adjlist) def parse_adjlist( lines, comments="#", delimiter=None, create_using=None, nodetype=None ): G = nx.empty_graph(0, create_using) edges = [] nodes = [] # nodes that has not any adjacency for line in lines: p = line.find(comments) if p >= 0: line = line[:p] if not line: continue vlist = line.strip().split(delimiter) u = vlist.pop(0) # convert types if nodetype is not None: try: u = nodetype(u) except Exception as e: raise TypeError( "Failed to convert node ({}) to type {}".format(u, nodetype) ) from e if len(vlist) == 0: nodes.append(u) if nodetype is not None: try: vlist = map(nodetype, vlist) except Exception as e: raise TypeError( "Failed to convert nodes ({}) to type {}".format( ",".join(vlist), nodetype ) ) from e edges.extend([u, v] for v in vlist) # N.B: batch add edges to graph. if nodes: G.add_nodes_from(nodes) G.add_edges_from(edges) return G @open_file(0, mode="rb") def read_adjlist( path, comments="#", delimiter=None, create_using=None, nodetype=None, encoding="utf-8", ): """Read graph in adjacency list format from path. Parameters ---------- path : string or file Filename or file handle to read. Filenames ending in .gz or .bz2 will be uncompressed. create_using : graphscope.nx graph constructor, optional (default=nx.Graph) Graph type to create. If graph instance, then cleared before populated. nodetype : int, str, float, tuple, bool Python object, optional Convert nodes to this type. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels. The default is whitespace. Returns ------- G: graphscope.nx graph The graph corresponding to the lines in adjacency list format. Notes ----- This format does not store graph or node data. See Also -------- read_edgelist """ lines = (line.decode(encoding) for line in path) return parse_adjlist( lines, comments=comments, delimiter=delimiter, create_using=create_using, nodetype=nodetype, ) # fixture for pytest def teardown_module(module): import os for fname in ["test.adjlist", "test.adjlist.gz"]: if os.path.isfile(fname): os.unlink(fname)
[ "graphscope.nx.empty_graph", "os.unlink", "networkx.utils.decorators.open_file", "graphscope.nx.utils.compat.import_as_graphscope_nx", "os.path.isfile", "graphscope.nx.utils.compat.patch_docstring" ]
[((795, 846), 'graphscope.nx.utils.compat.import_as_graphscope_nx', 'import_as_graphscope_nx', (['networkx.readwrite.adjlist'], {}), '(networkx.readwrite.adjlist)\n', (818, 846), False, 'from graphscope.nx.utils.compat import import_as_graphscope_nx\n'), ((850, 881), 'graphscope.nx.utils.compat.patch_docstring', 'patch_docstring', (['_parse_adjlist'], {}), '(_parse_adjlist)\n', (865, 881), False, 'from graphscope.nx.utils.compat import patch_docstring\n'), ((2136, 2159), 'networkx.utils.decorators.open_file', 'open_file', (['(0)'], {'mode': '"""rb"""'}), "(0, mode='rb')\n", (2145, 2159), False, 'from networkx.utils.decorators import open_file\n'), ((986, 1017), 'graphscope.nx.empty_graph', 'nx.empty_graph', (['(0)', 'create_using'], {}), '(0, create_using)\n', (1000, 1017), False, 'from graphscope import nx\n'), ((3528, 3549), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (3542, 3549), False, 'import os\n'), ((3563, 3579), 'os.unlink', 'os.unlink', (['fname'], {}), '(fname)\n', (3572, 3579), False, 'import os\n')]
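For the adjacency-list reader above, the expected input is one node per line followed by its neighbours. A small sketch, again assuming an active GraphScope session and a NetworkX-style top-level re-export of parse_adjlist; the sample lines are made up.

from graphscope import nx

lines = [
    "1 2 5",  # node 1 with neighbours 2 and 5
    "2 3 4",
    "3 5",
    "4",      # a node with no neighbours is still added to the graph
    "5",
]
G = nx.parse_adjlist(lines, nodetype=int)
print(sorted(G.nodes()), G.number_of_edges())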
import pytest from networkx.testing import almost_equal from graphscope import nx from graphscope.nx.tests.utils import replace_with_inf @pytest.mark.usefixtures("graphscope_session") class TestRunGenericPath: def setup_class(cls): cls.edges = [(0, 1), (0, 2), (1, 2), (2, 3), (1, 4)] G = nx.Graph() G.add_edges_from(cls.edges, weight=1) DG = nx.DiGraph() DG.add_edges_from(cls.edges, weight=1) cls.G = G cls.DG = DG def test_run_shortest_path(self): nx.builtin.shortest_path(self.G, source=0, weight="weight") def test_run_shortest_path_length(self): nx.builtin.single_source_dijkstra_path_length(self.G, source=0, weight="weight") def test_run_average_shortest_path_length(self): nx.builtin.average_shortest_path_length(self.G, weight="weight") def test_run_has_path(self): assert nx.builtin.has_path(self.G, source=0, target=3) def test_shortest_path_length_on_reverse_view(self): ret1 = nx.builtin.single_source_dijkstra_path_length( self.DG, source=2, weight="weight" ) assert replace_with_inf(ret1) == { 0.0: float("inf"), 1.0: float("inf"), 2.0: 0.0, 3.0: 1.0, 4.0: float("inf"), } RDG = self.DG.reverse(copy=True) ret2 = nx.builtin.single_source_dijkstra_path_length( RDG, source=2, weight="weight" ) assert replace_with_inf(ret2) == { 0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: float("inf"), 4.0: float("inf"), } def test_shortest_path_length_on_directed_view(self): ret1 = nx.builtin.single_source_dijkstra_path_length( self.G, source=2, weight="weight" ) assert ret1 == {0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: 1.0, 4.0: 2.0} DG = self.G.to_directed(as_view=True) ret2 = nx.builtin.single_source_dijkstra_path_length( DG, source=2, weight="weight" ) assert ret2 == {0.0: 1.0, 1.0: 1.0, 2.0: 0.0, 3.0: 1.0, 4.0: 2.0} @pytest.mark.skip(reason="DynamicFragment duplicated mode not ready.") def test_all_pairs_shortest_path_length(self): cycle = nx.cycle_graph(7) pl = nx.builtin.all_pairs_shortest_path_length(cycle) assert pl[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} assert pl[1] == {0: 1, 1: 0, 2: 1, 3: 2, 4: 3, 5: 3, 6: 2} for e in cycle.edges: cycle.edges[e]["weight"] = 1 cycle[1][2]["weight"] = 10 pl = nx.builtin.all_pairs_shortest_path_length(cycle, weight="weight") assert pl[0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} assert pl[1] == {0: 1, 1: 0, 2: 6, 3: 5, 4: 4, 5: 3, 6: 2} @pytest.mark.usefixtures("graphscope_session") class TestGenericPath: @classmethod def setup_class(cls): from networkx import convert_node_labels_to_integers as cnlti from networkx import grid_2d_graph grid = cnlti(grid_2d_graph(4, 4), first_label=1, ordering="sorted") cls.grid = nx.Graph(grid) cls.cycle = nx.cycle_graph(7) cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) cls.neg_weights = nx.DiGraph() cls.neg_weights.add_edge(0, 1, weight=1) cls.neg_weights.add_edge(0, 2, weight=3) cls.neg_weights.add_edge(1, 3, weight=1) cls.neg_weights.add_edge(2, 3, weight=-2) def test_has_path(self): G = nx.Graph() nx.add_path(G, range(3)) nx.add_path(G, range(3, 5)) assert nx.builtin.has_path(G, 0, 2) assert not nx.builtin.has_path(G, 0, 4) @pytest.mark.usefixtures("graphscope_session") class TestAverageShortestPathLength: def test_cycle_graph(self): ans = nx.average_shortest_path_length(nx.cycle_graph(7)) assert almost_equal(ans, 2) def test_path_graph(self): ans = nx.average_shortest_path_length(nx.path_graph(5)) assert almost_equal(ans, 2) def test_weighted(self): G = nx.Graph() nx.add_cycle(G, range(7), weight=2) ans = nx.average_shortest_path_length(G, weight="weight") assert almost_equal(ans, 4) G = nx.Graph() nx.add_path(G, 
range(5), weight=2) ans = nx.average_shortest_path_length(G, weight="weight") assert almost_equal(ans, 4) @pytest.mark.skip(reason="not support specify method.") def test_specified_methods(self): G = nx.Graph() nx.add_cycle(G, range(7), weight=2) ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") assert almost_equal(ans, 4) ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") assert almost_equal(ans, 4) ans = nx.average_shortest_path_length( G, weight="weight", method="floyd-warshall" ) assert almost_equal(ans, 4) G = nx.Graph() nx.add_path(G, range(5), weight=2) ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") assert almost_equal(ans, 4) ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") assert almost_equal(ans, 4) ans = nx.average_shortest_path_length( G, weight="weight", method="floyd-warshall" ) assert almost_equal(ans, 4) @pytest.mark.skip( reason="TODO(@weibin): raise disconnected error when result is inf." ) def test_disconnected(self): g = nx.Graph() g.add_nodes_from(range(3)) g.add_edge(0, 1) pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g) g = g.to_directed() pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g) def test_trivial_graph(self): """Tests that the trivial graph has average path length zero, since there is exactly one path of length zero in the trivial graph. For more information, see issue #1960. """ G = nx.trivial_graph() assert nx.average_shortest_path_length(G) == 0 def test_null_graph(self): with pytest.raises(nx.NetworkXPointlessConcept): nx.average_shortest_path_length(nx.null_graph()) @pytest.mark.skip(reason="not support specify method.") def test_bad_method(self): with pytest.raises(ValueError): G = nx.path_graph(2) nx.average_shortest_path_length(G, weight="weight", method="SPAM")
[ "graphscope.nx.cycle_graph", "graphscope.nx.DiGraph", "graphscope.nx.builtin.has_path", "graphscope.nx.builtin.average_shortest_path_length", "graphscope.nx.path_graph", "networkx.testing.almost_equal", "graphscope.nx.average_shortest_path_length", "graphscope.nx.builtin.all_pairs_shortest_path_length", "networkx.grid_2d_graph", "graphscope.nx.Graph", "graphscope.nx.tests.utils.replace_with_inf", "pytest.raises", "graphscope.nx.builtin.single_source_dijkstra_path_length", "graphscope.nx.trivial_graph", "graphscope.nx.builtin.shortest_path", "pytest.mark.skip", "graphscope.nx.null_graph", "pytest.mark.usefixtures" ]
[((141, 186), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (164, 186), False, 'import pytest\n'), ((2815, 2860), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (2838, 2860), False, 'import pytest\n'), ((3716, 3761), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (3739, 3761), False, 'import pytest\n'), ((2141, 2210), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""DynamicFragment duplicated mode not ready."""'}), "(reason='DynamicFragment duplicated mode not ready.')\n", (2157, 2210), False, 'import pytest\n'), ((4437, 4491), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support specify method."""'}), "(reason='not support specify method.')\n", (4453, 4491), False, 'import pytest\n'), ((5460, 5551), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""TODO(@weibin): raise disconnected error when result is inf."""'}), "(reason=\n 'TODO(@weibin): raise disconnected error when result is inf.')\n", (5476, 5551), False, 'import pytest\n'), ((6350, 6404), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support specify method."""'}), "(reason='not support specify method.')\n", (6366, 6404), False, 'import pytest\n'), ((312, 322), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (320, 322), False, 'from graphscope import nx\n'), ((382, 394), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (392, 394), False, 'from graphscope import nx\n'), ((527, 586), 'graphscope.nx.builtin.shortest_path', 'nx.builtin.shortest_path', (['self.G'], {'source': '(0)', 'weight': '"""weight"""'}), "(self.G, source=0, weight='weight')\n", (551, 586), False, 'from graphscope import nx\n'), ((641, 726), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.G'], {'source': '(0)', 'weight': '"""weight"""'}), "(self.G, source=0, weight='weight'\n )\n", (686, 726), False, 'from graphscope import nx\n'), ((784, 848), 'graphscope.nx.builtin.average_shortest_path_length', 'nx.builtin.average_shortest_path_length', (['self.G'], {'weight': '"""weight"""'}), "(self.G, weight='weight')\n", (823, 848), False, 'from graphscope import nx\n'), ((898, 945), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['self.G'], {'source': '(0)', 'target': '(3)'}), '(self.G, source=0, target=3)\n', (917, 945), False, 'from graphscope import nx\n'), ((1019, 1105), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.DG'], {'source': '(2)', 'weight': '"""weight"""'}), "(self.DG, source=2, weight=\n 'weight')\n", (1064, 1105), False, 'from graphscope import nx\n'), ((1369, 1446), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['RDG'], {'source': '(2)', 'weight': '"""weight"""'}), "(RDG, source=2, weight='weight')\n", (1414, 1446), False, 'from graphscope import nx\n'), ((1724, 1809), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['self.G'], {'source': '(2)', 'weight': '"""weight"""'}), "(self.G, source=2, weight='weight'\n )\n", (1769, 1809), False, 'from graphscope import nx\n'), ((1962, 2038), 'graphscope.nx.builtin.single_source_dijkstra_path_length', 'nx.builtin.single_source_dijkstra_path_length', (['DG'], 
{'source': '(2)', 'weight': '"""weight"""'}), "(DG, source=2, weight='weight')\n", (2007, 2038), False, 'from graphscope import nx\n'), ((2278, 2295), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(7)'], {}), '(7)\n', (2292, 2295), False, 'from graphscope import nx\n'), ((2309, 2357), 'graphscope.nx.builtin.all_pairs_shortest_path_length', 'nx.builtin.all_pairs_shortest_path_length', (['cycle'], {}), '(cycle)\n', (2350, 2357), False, 'from graphscope import nx\n'), ((2612, 2677), 'graphscope.nx.builtin.all_pairs_shortest_path_length', 'nx.builtin.all_pairs_shortest_path_length', (['cycle'], {'weight': '"""weight"""'}), "(cycle, weight='weight')\n", (2653, 2677), False, 'from graphscope import nx\n'), ((3136, 3150), 'graphscope.nx.Graph', 'nx.Graph', (['grid'], {}), '(grid)\n', (3144, 3150), False, 'from graphscope import nx\n'), ((3171, 3188), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(7)'], {}), '(7)\n', (3185, 3188), False, 'from graphscope import nx\n'), ((3289, 3301), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (3299, 3301), False, 'from graphscope import nx\n'), ((3541, 3551), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (3549, 3551), False, 'from graphscope import nx\n'), ((3636, 3664), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['G', '(0)', '(2)'], {}), '(G, 0, 2)\n', (3655, 3664), False, 'from graphscope import nx\n'), ((3911, 3931), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(2)'], {}), '(ans, 2)\n', (3923, 3931), False, 'from networkx.testing import almost_equal\n'), ((4043, 4063), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(2)'], {}), '(ans, 2)\n', (4055, 4063), False, 'from networkx.testing import almost_equal\n'), ((4106, 4116), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (4114, 4116), False, 'from graphscope import nx\n'), ((4175, 4226), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""'}), "(G, weight='weight')\n", (4206, 4226), False, 'from graphscope import nx\n'), ((4242, 4262), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (4254, 4262), False, 'from networkx.testing import almost_equal\n'), ((4275, 4285), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (4283, 4285), False, 'from graphscope import nx\n'), ((4343, 4394), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""'}), "(G, weight='weight')\n", (4374, 4394), False, 'from graphscope import nx\n'), ((4410, 4430), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (4422, 4430), False, 'from networkx.testing import almost_equal\n'), ((4542, 4552), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (4550, 4552), False, 'from graphscope import nx\n'), ((4611, 4681), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""dijkstra"""'}), "(G, weight='weight', method='dijkstra')\n", (4642, 4681), False, 'from graphscope import nx\n'), ((4697, 4717), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (4709, 4717), False, 'from networkx.testing import almost_equal\n'), ((4732, 4806), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""bellman-ford"""'}), "(G, weight='weight', method='bellman-ford')\n", (4763, 4806), False, 'from graphscope import nx\n'), 
((4822, 4842), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (4834, 4842), False, 'from networkx.testing import almost_equal\n'), ((4857, 4933), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""floyd-warshall"""'}), "(G, weight='weight', method='floyd-warshall')\n", (4888, 4933), False, 'from graphscope import nx\n'), ((4971, 4991), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (4983, 4991), False, 'from networkx.testing import almost_equal\n'), ((5005, 5015), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (5013, 5015), False, 'from graphscope import nx\n'), ((5073, 5143), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""dijkstra"""'}), "(G, weight='weight', method='dijkstra')\n", (5104, 5143), False, 'from graphscope import nx\n'), ((5159, 5179), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (5171, 5179), False, 'from networkx.testing import almost_equal\n'), ((5194, 5268), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""bellman-ford"""'}), "(G, weight='weight', method='bellman-ford')\n", (5225, 5268), False, 'from graphscope import nx\n'), ((5284, 5304), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (5296, 5304), False, 'from networkx.testing import almost_equal\n'), ((5319, 5395), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""floyd-warshall"""'}), "(G, weight='weight', method='floyd-warshall')\n", (5350, 5395), False, 'from graphscope import nx\n'), ((5433, 5453), 'networkx.testing.almost_equal', 'almost_equal', (['ans', '(4)'], {}), '(ans, 4)\n', (5445, 5453), False, 'from networkx.testing import almost_equal\n'), ((5606, 5616), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (5614, 5616), False, 'from graphscope import nx\n'), ((5685, 5752), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.average_shortest_path_length', 'g'], {}), '(nx.NetworkXError, nx.average_shortest_path_length, g)\n', (5698, 5752), False, 'import pytest\n'), ((5789, 5856), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.average_shortest_path_length', 'g'], {}), '(nx.NetworkXError, nx.average_shortest_path_length, g)\n', (5802, 5856), False, 'import pytest\n'), ((6120, 6138), 'graphscope.nx.trivial_graph', 'nx.trivial_graph', ([], {}), '()\n', (6136, 6138), False, 'from graphscope import nx\n'), ((1138, 1160), 'graphscope.nx.tests.utils.replace_with_inf', 'replace_with_inf', (['ret1'], {}), '(ret1)\n', (1154, 1160), False, 'from graphscope.nx.tests.utils import replace_with_inf\n'), ((1484, 1506), 'graphscope.nx.tests.utils.replace_with_inf', 'replace_with_inf', (['ret2'], {}), '(ret2)\n', (1500, 1506), False, 'from graphscope.nx.tests.utils import replace_with_inf\n'), ((3062, 3081), 'networkx.grid_2d_graph', 'grid_2d_graph', (['(4)', '(4)'], {}), '(4, 4)\n', (3075, 3081), False, 'from networkx import grid_2d_graph\n'), ((3684, 3712), 'graphscope.nx.builtin.has_path', 'nx.builtin.has_path', (['G', '(0)', '(4)'], {}), '(G, 0, 4)\n', (3703, 3712), False, 'from graphscope import nx\n'), ((3877, 3894), 'graphscope.nx.cycle_graph', 'nx.cycle_graph', (['(7)'], {}), '(7)\n', (3891, 3894), False, 'from graphscope 
import nx\n'), ((4010, 4026), 'graphscope.nx.path_graph', 'nx.path_graph', (['(5)'], {}), '(5)\n', (4023, 4026), False, 'from graphscope import nx\n'), ((6154, 6188), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {}), '(G)\n', (6185, 6188), False, 'from graphscope import nx\n'), ((6239, 6281), 'pytest.raises', 'pytest.raises', (['nx.NetworkXPointlessConcept'], {}), '(nx.NetworkXPointlessConcept)\n', (6252, 6281), False, 'import pytest\n'), ((6449, 6474), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6462, 6474), False, 'import pytest\n'), ((6492, 6508), 'graphscope.nx.path_graph', 'nx.path_graph', (['(2)'], {}), '(2)\n', (6505, 6508), False, 'from graphscope import nx\n'), ((6521, 6587), 'graphscope.nx.average_shortest_path_length', 'nx.average_shortest_path_length', (['G'], {'weight': '"""weight"""', 'method': '"""SPAM"""'}), "(G, weight='weight', method='SPAM')\n", (6552, 6587), False, 'from graphscope import nx\n'), ((3249, 3261), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (3259, 3261), False, 'from graphscope import nx\n'), ((6327, 6342), 'graphscope.nx.null_graph', 'nx.null_graph', ([], {}), '()\n', (6340, 6342), False, 'from graphscope import nx\n')]
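The tests above exercise the graphscope.nx built-in shortest-path algorithms. A condensed sketch of the same calls, to be run inside an active GraphScope session (e.g. under the graphscope_session fixture used above); the printed values are what the tests imply for this small graph.

from graphscope import nx

G = nx.Graph()
G.add_edges_from([(0, 1), (0, 2), (1, 2), (2, 3), (1, 4)], weight=1)

# Shortest-path lengths from node 0; keys come back as floats.
lengths = nx.builtin.single_source_dijkstra_path_length(G, source=0, weight="weight")
print(lengths)  # e.g. {0.0: 0.0, 1.0: 1.0, 2.0: 1.0, 3.0: 2.0, 4.0: 2.0}

assert nx.builtin.has_path(G, source=0, target=3)
print(nx.builtin.average_shortest_path_length(G, weight="weight"))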
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import os import random import string import subprocess import sys import pytest import graphscope from graphscope.config import GSConfig as gs_config from graphscope.dataset.ldbc import load_ldbc from graphscope.dataset.modern_graph import load_modern_graph from graphscope.framework.loader import Loader logger = logging.getLogger("graphscope") @pytest.fixture def data_dir(): return "/testingdata/ldbc_sample" @pytest.fixture def modern_graph_data_dir(): return "/testingdata/modern_graph" def get_gs_image_on_ci_env(): if "GS_IMAGE" in os.environ: return os.environ["GS_IMAGE"] else: return gs_config.GS_IMAGE def test_demo(data_dir): image = get_gs_image_on_ci_env() sess = graphscope.session( show_log=True, num_workers=1, k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem="2500Mi", k8s_vineyard_cpu=0.1, k8s_vineyard_mem="512Mi", k8s_engine_cpu=0.1, k8s_engine_mem="1500Mi", k8s_vineyard_shared_mem="2Gi", ) graph = load_ldbc(sess, data_dir) # Interactive engine interactive = sess.gremlin(graph) sub_graph = interactive.subgraph( # noqa: F841 'g.V().hasLabel("person").outE("knows")' ) # Analytical engine # project the projected graph to simple graph. simple_g = sub_graph.project_to_simple(v_label="person", e_label="knows") pr_result = graphscope.pagerank(simple_g, delta=0.8) tc_result = graphscope.triangles(simple_g) # add the PageRank and triangle-counting results as new columns to the property graph # FIXME: Add column to sub_graph sub_graph.add_column(pr_result, {"Ranking": "r"}) sub_graph.add_column(tc_result, {"TC": "r"}) # GNN engine sess.close() def test_demo_distribute(data_dir, modern_graph_data_dir): image = get_gs_image_on_ci_env() sess = graphscope.session( show_log=True, num_workers=1, k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem="2500Mi", k8s_vineyard_cpu=0.1, k8s_vineyard_mem="512Mi", k8s_engine_cpu=0.1, k8s_engine_mem="1500Mi", k8s_vineyard_shared_mem="2Gi", ) graph = load_ldbc(sess, data_dir) # Interactive engine interactive = sess.gremlin(graph) sub_graph = interactive.subgraph( # noqa: F841 'g.V().hasLabel("person").outE("knows")' ) person_count = ( interactive.execute( 'g.V().hasLabel("person").outE("knows").bothV().dedup().count()' ) .all() .result()[0] ) knows_count = ( interactive.execute('g.V().hasLabel("person").outE("knows").count()') .all() .result()[0] ) interactive2 = sess.gremlin(sub_graph) sub_person_count = interactive2.execute("g.V().count()").all().result()[0] sub_knows_count = interactive2.execute("g.E().count()").all().result()[0] assert person_count == sub_person_count assert knows_count == sub_knows_count # Analytical engine # project the projected graph to simple graph. 
simple_g = sub_graph.project_to_simple(v_label="person", e_label="knows") pr_result = graphscope.pagerank(simple_g, delta=0.8) tc_result = graphscope.triangles(simple_g) # add the PageRank and triangle-counting results as new columns to the property graph # FIXME: Add column to sub_graph sub_graph.add_column(pr_result, {"Ranking": "r"}) sub_graph.add_column(tc_result, {"TC": "r"}) # test subgraph on modern graph mgraph = load_modern_graph(sess, modern_graph_data_dir) # Interactive engine minteractive = sess.gremlin(mgraph) msub_graph = minteractive.subgraph( # noqa: F841 'g.V().hasLabel("person").outE("knows")' ) person_count = ( minteractive.execute( 'g.V().hasLabel("person").outE("knows").bothV().dedup().count()' ) .all() .result()[0] ) msub_interactive = sess.gremlin(msub_graph) sub_person_count = msub_interactive.execute("g.V().count()").all().result()[0] assert person_count == sub_person_count # GNN engine sess.close() def test_multiple_session(data_dir): namespace = "gs-multi-" + "".join( [random.choice(string.ascii_lowercase) for _ in range(6)] ) image = get_gs_image_on_ci_env() sess = graphscope.session( show_log=True, num_workers=1, k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem="2500Mi", k8s_vineyard_cpu=0.1, k8s_vineyard_mem="512Mi", k8s_engine_cpu=0.1, k8s_engine_mem="1500Mi", k8s_vineyard_shared_mem="2Gi", ) info = sess.info assert info["status"] == "active" assert info["type"] == "k8s" assert len(info["engine_hosts"].split(",")) == 1 sess2 = graphscope.session( show_log=True, k8s_namespace=namespace, num_workers=2, k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem="2500Mi", k8s_vineyard_cpu=0.1, k8s_vineyard_mem="512Mi", k8s_engine_cpu=0.1, k8s_engine_mem="1500Mi", k8s_vineyard_shared_mem="2Gi", ) info = sess2.info assert info["status"] == "active" assert info["type"] == "k8s" assert len(info["engine_hosts"].split(",")) == 2 sess2.close() sess.close() def test_query_modern_graph(modern_graph_data_dir): image = get_gs_image_on_ci_env() sess = graphscope.session( show_log=True, num_workers=1, k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem="2500Mi", k8s_vineyard_cpu=0.1, k8s_vineyard_mem="512Mi", k8s_engine_cpu=0.1, k8s_engine_mem="1500Mi", k8s_vineyard_shared_mem="2Gi", ) graph = load_modern_graph(sess, modern_graph_data_dir) interactive = sess.gremlin(graph) queries = [ "g.V().has('name','marko').count()", "g.V().has('person','name','marko').count()", "g.V().has('person','name','marko').outE('created').count()", "g.V().has('person','name','marko').outE('created').inV().count()", "g.V().has('person','name','marko').out('created').count()", "g.V().has('person','name','marko').out('created').values('name').count()", ] for q in queries: result = interactive.execute(q).all().result()[0] assert result == 1 def test_traversal_modern_graph(modern_graph_data_dir): from gremlin_python.process.traversal import Order from gremlin_python.process.traversal import P image = get_gs_image_on_ci_env() sess = graphscope.session( show_log=True, num_workers=1, k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem="2500Mi", k8s_vineyard_cpu=0.1, k8s_vineyard_mem="512Mi", k8s_engine_cpu=0.1, k8s_engine_mem="1500Mi", k8s_vineyard_shared_mem="2Gi", ) graph = load_modern_graph(sess, modern_graph_data_dir) interactive = sess.gremlin(graph) g = interactive.traversal_source() assert g.V().has("name", "marko").count().toList()[0] == 1 assert g.V().has("person", "name", "marko").count().toList()[0] == 1 assert g.V().has("person", "name", 
"marko").outE("created").count().toList()[0] == 1 assert ( g.V().has("person", "name", "marko").outE("created").inV().count().toList()[0] == 1 ) assert g.V().has("person", "name", "marko").out("created").count().toList()[0] == 1 assert ( g.V() .has("person", "name", "marko") .out("created") .values("name") .count() .toList()[0] == 1 ) assert ( g.V() .hasLabel("person") .has("age", P.gt(30)) .order() .by("age", Order.desc) .count() .toList()[0] == 2 )
[ "graphscope.session", "graphscope.pagerank", "random.choice", "gremlin_python.process.traversal.P.gt", "graphscope.dataset.modern_graph.load_modern_graph", "graphscope.dataset.ldbc.load_ldbc", "graphscope.triangles", "logging.getLogger" ]
[((1000, 1031), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1017, 1031), False, 'import logging\n'), ((1412, 1673), 'graphscope.session', 'graphscope.session', ([], {'show_log': '(True)', 'num_workers': '(1)', 'k8s_gs_image': 'image', 'k8s_coordinator_cpu': '(0.5)', 'k8s_coordinator_mem': '"""2500Mi"""', 'k8s_vineyard_cpu': '(0.1)', 'k8s_vineyard_mem': '"""512Mi"""', 'k8s_engine_cpu': '(0.1)', 'k8s_engine_mem': '"""1500Mi"""', 'k8s_vineyard_shared_mem': '"""2Gi"""'}), "(show_log=True, num_workers=1, k8s_gs_image=image,\n k8s_coordinator_cpu=0.5, k8s_coordinator_mem='2500Mi', k8s_vineyard_cpu\n =0.1, k8s_vineyard_mem='512Mi', k8s_engine_cpu=0.1, k8s_engine_mem=\n '1500Mi', k8s_vineyard_shared_mem='2Gi')\n", (1430, 1673), False, 'import graphscope\n'), ((1759, 1784), 'graphscope.dataset.ldbc.load_ldbc', 'load_ldbc', (['sess', 'data_dir'], {}), '(sess, data_dir)\n', (1768, 1784), False, 'from graphscope.dataset.ldbc import load_ldbc\n'), ((2127, 2167), 'graphscope.pagerank', 'graphscope.pagerank', (['simple_g'], {'delta': '(0.8)'}), '(simple_g, delta=0.8)\n', (2146, 2167), False, 'import graphscope\n'), ((2184, 2214), 'graphscope.triangles', 'graphscope.triangles', (['simple_g'], {}), '(simple_g)\n', (2204, 2214), False, 'import graphscope\n'), ((2590, 2851), 'graphscope.session', 'graphscope.session', ([], {'show_log': '(True)', 'num_workers': '(1)', 'k8s_gs_image': 'image', 'k8s_coordinator_cpu': '(0.5)', 'k8s_coordinator_mem': '"""2500Mi"""', 'k8s_vineyard_cpu': '(0.1)', 'k8s_vineyard_mem': '"""512Mi"""', 'k8s_engine_cpu': '(0.1)', 'k8s_engine_mem': '"""1500Mi"""', 'k8s_vineyard_shared_mem': '"""2Gi"""'}), "(show_log=True, num_workers=1, k8s_gs_image=image,\n k8s_coordinator_cpu=0.5, k8s_coordinator_mem='2500Mi', k8s_vineyard_cpu\n =0.1, k8s_vineyard_mem='512Mi', k8s_engine_cpu=0.1, k8s_engine_mem=\n '1500Mi', k8s_vineyard_shared_mem='2Gi')\n", (2608, 2851), False, 'import graphscope\n'), ((2937, 2962), 'graphscope.dataset.ldbc.load_ldbc', 'load_ldbc', (['sess', 'data_dir'], {}), '(sess, data_dir)\n', (2946, 2962), False, 'from graphscope.dataset.ldbc import load_ldbc\n'), ((3910, 3950), 'graphscope.pagerank', 'graphscope.pagerank', (['simple_g'], {'delta': '(0.8)'}), '(simple_g, delta=0.8)\n', (3929, 3950), False, 'import graphscope\n'), ((3967, 3997), 'graphscope.triangles', 'graphscope.triangles', (['simple_g'], {}), '(simple_g)\n', (3987, 3997), False, 'import graphscope\n'), ((4279, 4325), 'graphscope.dataset.modern_graph.load_modern_graph', 'load_modern_graph', (['sess', 'modern_graph_data_dir'], {}), '(sess, modern_graph_data_dir)\n', (4296, 4325), False, 'from graphscope.dataset.modern_graph import load_modern_graph\n'), ((5090, 5351), 'graphscope.session', 'graphscope.session', ([], {'show_log': '(True)', 'num_workers': '(1)', 'k8s_gs_image': 'image', 'k8s_coordinator_cpu': '(0.5)', 'k8s_coordinator_mem': '"""2500Mi"""', 'k8s_vineyard_cpu': '(0.1)', 'k8s_vineyard_mem': '"""512Mi"""', 'k8s_engine_cpu': '(0.1)', 'k8s_engine_mem': '"""1500Mi"""', 'k8s_vineyard_shared_mem': '"""2Gi"""'}), "(show_log=True, num_workers=1, k8s_gs_image=image,\n k8s_coordinator_cpu=0.5, k8s_coordinator_mem='2500Mi', k8s_vineyard_cpu\n =0.1, k8s_vineyard_mem='512Mi', k8s_engine_cpu=0.1, k8s_engine_mem=\n '1500Mi', k8s_vineyard_shared_mem='2Gi')\n", (5108, 5351), False, 'import graphscope\n'), ((5583, 5868), 'graphscope.session', 'graphscope.session', ([], {'show_log': '(True)', 'k8s_namespace': 'namespace', 'num_workers': '(2)', 'k8s_gs_image': 'image', 
'k8s_coordinator_cpu': '(0.5)', 'k8s_coordinator_mem': '"""2500Mi"""', 'k8s_vineyard_cpu': '(0.1)', 'k8s_vineyard_mem': '"""512Mi"""', 'k8s_engine_cpu': '(0.1)', 'k8s_engine_mem': '"""1500Mi"""', 'k8s_vineyard_shared_mem': '"""2Gi"""'}), "(show_log=True, k8s_namespace=namespace, num_workers=2,\n k8s_gs_image=image, k8s_coordinator_cpu=0.5, k8s_coordinator_mem=\n '2500Mi', k8s_vineyard_cpu=0.1, k8s_vineyard_mem='512Mi',\n k8s_engine_cpu=0.1, k8s_engine_mem='1500Mi', k8s_vineyard_shared_mem='2Gi')\n", (5601, 5868), False, 'import graphscope\n'), ((6236, 6497), 'graphscope.session', 'graphscope.session', ([], {'show_log': '(True)', 'num_workers': '(1)', 'k8s_gs_image': 'image', 'k8s_coordinator_cpu': '(0.5)', 'k8s_coordinator_mem': '"""2500Mi"""', 'k8s_vineyard_cpu': '(0.1)', 'k8s_vineyard_mem': '"""512Mi"""', 'k8s_engine_cpu': '(0.1)', 'k8s_engine_mem': '"""1500Mi"""', 'k8s_vineyard_shared_mem': '"""2Gi"""'}), "(show_log=True, num_workers=1, k8s_gs_image=image,\n k8s_coordinator_cpu=0.5, k8s_coordinator_mem='2500Mi', k8s_vineyard_cpu\n =0.1, k8s_vineyard_mem='512Mi', k8s_engine_cpu=0.1, k8s_engine_mem=\n '1500Mi', k8s_vineyard_shared_mem='2Gi')\n", (6254, 6497), False, 'import graphscope\n'), ((6583, 6629), 'graphscope.dataset.modern_graph.load_modern_graph', 'load_modern_graph', (['sess', 'modern_graph_data_dir'], {}), '(sess, modern_graph_data_dir)\n', (6600, 6629), False, 'from graphscope.dataset.modern_graph import load_modern_graph\n'), ((7408, 7669), 'graphscope.session', 'graphscope.session', ([], {'show_log': '(True)', 'num_workers': '(1)', 'k8s_gs_image': 'image', 'k8s_coordinator_cpu': '(0.5)', 'k8s_coordinator_mem': '"""2500Mi"""', 'k8s_vineyard_cpu': '(0.1)', 'k8s_vineyard_mem': '"""512Mi"""', 'k8s_engine_cpu': '(0.1)', 'k8s_engine_mem': '"""1500Mi"""', 'k8s_vineyard_shared_mem': '"""2Gi"""'}), "(show_log=True, num_workers=1, k8s_gs_image=image,\n k8s_coordinator_cpu=0.5, k8s_coordinator_mem='2500Mi', k8s_vineyard_cpu\n =0.1, k8s_vineyard_mem='512Mi', k8s_engine_cpu=0.1, k8s_engine_mem=\n '1500Mi', k8s_vineyard_shared_mem='2Gi')\n", (7426, 7669), False, 'import graphscope\n'), ((7755, 7801), 'graphscope.dataset.modern_graph.load_modern_graph', 'load_modern_graph', (['sess', 'modern_graph_data_dir'], {}), '(sess, modern_graph_data_dir)\n', (7772, 7801), False, 'from graphscope.dataset.modern_graph import load_modern_graph\n'), ((4978, 5015), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (4991, 5015), False, 'import random\n'), ((8558, 8566), 'gremlin_python.process.traversal.P.gt', 'P.gt', (['(30)'], {}), '(30)\n', (8562, 8566), False, 'from gremlin_python.process.traversal import P\n')]
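The end-to-end test above combines the analytical and interactive engines in one session. Stripped of the CI-specific resource settings, the core workflow looks roughly as follows; the data path is a placeholder, the k8s image/resource keyword arguments from the test are omitted, and the remaining calls are the ones the test itself uses.

import graphscope
from graphscope.dataset.ldbc import load_ldbc

sess = graphscope.session(show_log=True, num_workers=1)  # k8s image/resource kwargs omitted here
graph = load_ldbc(sess, "/testingdata/ldbc_sample")

# Interactive engine: extract a subgraph with a Gremlin traversal.
interactive = sess.gremlin(graph)
sub_graph = interactive.subgraph('g.V().hasLabel("person").outE("knows")')

# Analytical engine: project to a simple graph and run algorithms.
simple_g = sub_graph.project_to_simple(v_label="person", e_label="knows")
pr_result = graphscope.pagerank(simple_g, delta=0.8)
tc_result = graphscope.triangles(simple_g)
sub_graph.add_column(pr_result, {"Ranking": "r"})
sub_graph.add_column(tc_result, {"TC": "r"})

sess.close()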
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # import io import os import sys import time import pytest from networkx.readwrite.tests.test_gexf import TestGEXF import graphscope.nx as nx from graphscope.nx.utils.compat import with_graphscope_nx_context @pytest.mark.usefixtures("graphscope_session") @with_graphscope_nx_context(TestGEXF) class TestGEXF: @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="Only test on standalone", ) def test_write_with_node_attributes(self): # Addresses #673. G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 3)]) for i in range(4): G.nodes[i]["id"] = i G.nodes[i]["label"] = i G.nodes[i]["pid"] = i G.nodes[i]["start"] = i G.nodes[i]["end"] = i + 1 if sys.version_info < (3, 8): expected = f"""<gexf version="1.2" xmlns="http://www.gexf.net/1.2\ draft" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:\ schemaLocation="http://www.gexf.net/1.2draft http://www.gexf.net/1.2draft/\ gexf.xsd"> <meta lastmodifieddate="{time.strftime('%Y-%m-%d')}"> <creator>NetworkX {nx.__version__}</creator> </meta> <graph defaultedgetype="undirected" mode="dynamic" name="" timeformat="long"> <nodes> <node end="1" id="0" label="0" pid="0" start="0" /> <node end="2" id="1" label="1" pid="1" start="1" /> <node end="3" id="2" label="2" pid="2" start="2" /> <node end="4" id="3" label="3" pid="3" start="3" /> </nodes> <edges> <edge id="0" source="0" target="1" /> <edge id="1" source="1" target="2" /> <edge id="2" source="2" target="3" /> </edges> </graph> </gexf>""" else: expected = f"""<gexf xmlns="http://www.gexf.net/1.2draft" xmlns:xsi\ ="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=\ "http://www.gexf.net/1.2draft http://www.gexf.net/1.2draft/\ gexf.xsd" version="1.2"> <meta lastmodifieddate="{time.strftime('%Y-%m-%d')}"> <creator>NetworkX {nx.__version__}</creator> </meta> <graph defaultedgetype="undirected" mode="dynamic" name="" timeformat="long"> <nodes> <node id="0" label="0" pid="0" start="0" end="1" /> <node id="1" label="1" pid="1" start="1" end="2" /> <node id="2" label="2" pid="2" start="2" end="3" /> <node id="3" label="3" pid="3" start="3" end="4" /> </nodes> <edges> <edge source="0" target="1" id="0" /> <edge source="1" target="2" id="1" /> <edge source="2" target="3" id="2" /> </edges> </graph> </gexf>""" obtained = "\n".join(nx.generate_gexf(G)) assert expected == obtained @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="Only test on standalone", ) def test_edge_id_construct(self): G = nx.Graph() G.add_edges_from([(0, 1, {"id": 0}), (1, 2, {"id": 2}), (2, 3)]) if sys.version_info < (3, 8): expected = f"""<gexf version="1.2" xmlns="http://www.gexf.net/\ 1.2draft" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:\ schemaLocation="http://www.gexf.net/1.2draft http://www.gexf.net/1.2draft/\ gexf.xsd"> <meta lastmodifieddate="{time.strftime('%Y-%m-%d')}"> <creator>NetworkX {nx.__version__}</creator> </meta> <graph defaultedgetype="undirected" mode="static" name=""> <nodes> <node id="0" label="0" /> <node id="1" label="1" /> <node id="2" label="2" /> <node id="3" label="3" /> </nodes> <edges> <edge id="0" source="0" target="1" /> <edge 
id="2" source="1" target="2" /> <edge id="1" source="2" target="3" /> </edges> </graph> </gexf>""" else: expected = f"""<gexf xmlns="http://www.gexf.net/1.2draft" xmlns:xsi\ ="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.\ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> <meta lastmodifieddate="{time.strftime('%Y-%m-%d')}"> <creator>NetworkX {nx.__version__}</creator> </meta> <graph defaultedgetype="undirected" mode="static" name=""> <nodes> <node id="0" label="0" /> <node id="1" label="1" /> <node id="2" label="2" /> <node id="3" label="3" /> </nodes> <edges> <edge source="0" target="1" id="0" /> <edge source="1" target="2" id="2" /> <edge source="2" target="3" id="1" /> </edges> </graph> </gexf>""" obtained = "\n".join(nx.generate_gexf(G)) assert expected == obtained @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) != "standalone", reason="Only test on standalone", ) def test_numpy_type(self): G = nx.path_graph(4) try: import numpy except ImportError: return nx.set_node_attributes(G, {n: n for n in numpy.arange(4)}, "number") G[0][1]["edge-number"] = numpy.float64(1.1) if sys.version_info < (3, 8): expected = f"""<gexf version="1.2" xmlns="http://www.gexf.net/1.2draft"\ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation\ ="http://www.gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd"> <meta lastmodifieddate="{time.strftime('%Y-%m-%d')}"> <creator>NetworkX {nx.__version__}</creator> </meta> <graph defaultedgetype="undirected" mode="static" name=""> <attributes class="edge" mode="static"> <attribute id="1" title="edge-number" type="double" /> </attributes> <attributes class="node" mode="static"> <attribute id="0" title="number" type="long" /> </attributes> <nodes> <node id="0" label="0"> <attvalues> <attvalue for="0" value="0" /> </attvalues> </node> <node id="1" label="1"> <attvalues> <attvalue for="0" value="1" /> </attvalues> </node> <node id="2" label="2"> <attvalues> <attvalue for="0" value="2" /> </attvalues> </node> <node id="3" label="3"> <attvalues> <attvalue for="0" value="3" /> </attvalues> </node> </nodes> <edges> <edge id="0" source="0" target="1"> <attvalues> <attvalue for="1" value="1.1" /> </attvalues> </edge> <edge id="1" source="1" target="2" /> <edge id="2" source="2" target="3" /> </edges> </graph> </gexf>""" else: expected = f"""<gexf xmlns="http://www.gexf.net/1.2draft"\ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation\ ="http://www.gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd"\ version="1.2"> <meta lastmodifieddate="{time.strftime('%Y-%m-%d')}"> <creator>NetworkX {nx.__version__}</creator> </meta> <graph defaultedgetype="undirected" mode="static" name=""> <attributes mode="static" class="edge"> <attribute id="1" title="edge-number" type="double" /> </attributes> <attributes mode="static" class="node"> <attribute id="0" title="number" type="long" /> </attributes> <nodes> <node id="0" label="0"> <attvalues> <attvalue for="0" value="0" /> </attvalues> </node> <node id="1" label="1"> <attvalues> <attvalue for="0" value="1" /> </attvalues> </node> <node id="2" label="2"> <attvalues> <attvalue for="0" value="2" /> </attvalues> </node> <node id="3" label="3"> <attvalues> <attvalue for="0" value="3" /> </attvalues> </node> </nodes> <edges> <edge source="0" target="1" id="0"> <attvalues> <attvalue for="1" value="1.1" /> </attvalues> </edge> <edge source="1" target="2" id="1" /> <edge source="2" target="3" id="2" /> </edges> </graph> </gexf>""" obtained = 
"\n".join(nx.generate_gexf(G)) assert expected == obtained def test_simple_list(self): G = nx.Graph() list_value = [[1, 2, 3], [9, 1, 2]] G.add_node(1, key=list_value) fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert H.nodes[1]["networkx_key"] == list_value
[ "io.BytesIO", "graphscope.nx.utils.compat.with_graphscope_nx_context", "graphscope.nx.write_gexf", "graphscope.nx.generate_gexf", "graphscope.nx.path_graph", "time.strftime", "graphscope.nx.Graph", "os.environ.get", "numpy.arange", "graphscope.nx.read_gexf", "numpy.float64", "pytest.mark.usefixtures" ]
[((611, 656), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (634, 656), False, 'import pytest\n'), ((658, 694), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGEXF'], {}), '(TestGEXF)\n', (684, 694), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((929, 939), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (937, 939), True, 'import graphscope.nx as nx\n'), ((3246, 3256), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (3254, 3256), True, 'import graphscope.nx as nx\n'), ((5127, 5143), 'graphscope.nx.path_graph', 'nx.path_graph', (['(4)'], {}), '(4)\n', (5140, 5143), True, 'import graphscope.nx as nx\n'), ((5339, 5357), 'numpy.float64', 'numpy.float64', (['(1.1)'], {}), '(1.1)\n', (5352, 5357), False, 'import numpy\n'), ((8444, 8454), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (8452, 8454), True, 'import graphscope.nx as nx\n'), ((8550, 8562), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (8560, 8562), False, 'import io\n'), ((8571, 8591), 'graphscope.nx.write_gexf', 'nx.write_gexf', (['G', 'fh'], {}), '(G, fh)\n', (8584, 8591), True, 'import graphscope.nx as nx\n'), ((8623, 8654), 'graphscope.nx.read_gexf', 'nx.read_gexf', (['fh'], {'node_type': 'int'}), '(fh, node_type=int)\n', (8635, 8654), True, 'import graphscope.nx as nx\n'), ((3005, 3024), 'graphscope.nx.generate_gexf', 'nx.generate_gexf', (['G'], {}), '(G)\n', (3021, 3024), True, 'import graphscope.nx as nx\n'), ((744, 778), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (758, 778), False, 'import os\n'), ((4893, 4912), 'graphscope.nx.generate_gexf', 'nx.generate_gexf', (['G'], {}), '(G)\n', (4909, 4912), True, 'import graphscope.nx as nx\n'), ((3096, 3130), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (3110, 3130), False, 'import os\n'), ((8342, 8361), 'graphscope.nx.generate_gexf', 'nx.generate_gexf', (['G'], {}), '(G)\n', (8358, 8361), True, 'import graphscope.nx as nx\n'), ((4984, 5018), 'os.environ.get', 'os.environ.get', (['"""DEPLOYMENT"""', 'None'], {}), "('DEPLOYMENT', None)\n", (4998, 5018), False, 'import os\n'), ((1518, 1543), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (1531, 1543), False, 'import time\n'), ((2396, 2421), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (2409, 2421), False, 'import time\n'), ((3653, 3678), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (3666, 3678), False, 'import time\n'), ((4406, 4431), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (4419, 4431), False, 'import time\n'), ((5278, 5293), 'numpy.arange', 'numpy.arange', (['(4)'], {}), '(4)\n', (5290, 5293), False, 'import numpy\n'), ((5679, 5704), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (5692, 5704), False, 'import time\n'), ((7145, 7170), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (7158, 7170), False, 'import time\n')]
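The list-attribute case at the end of the GEXF tests doubles as a compact round-trip example. Under the same session fixture it reduces to the sketch below; the attribute value is the one used in the test.

import io

import graphscope.nx as nx

G = nx.Graph()
G.add_node(1, key=[[1, 2, 3], [9, 1, 2]])  # list-valued node attribute

fh = io.BytesIO()
nx.write_gexf(G, fh)
fh.seek(0)
H = nx.read_gexf(fh, node_type=int)
print(H.nodes[1])  # the list is restored under the "networkx_key" attribute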
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020-2021 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import os import random import string import subprocess import sys import numpy as np import pytest import graphscope from graphscope.config import GSConfig as gs_config from graphscope.dataset.ldbc import load_ldbc from graphscope.dataset.modern_graph import load_modern_graph from graphscope.framework.graph import Graph from graphscope.framework.loader import Loader graphscope.set_option(show_log=True) logger = logging.getLogger("graphscope") def get_k8s_volumes(): k8s_volumes = { "data": { "type": "hostPath", "field": {"path": os.environ["GS_TEST_DIR"], "type": "Directory"}, "mounts": {"mountPath": "/testingdata"}, } } return k8s_volumes def get_gs_image_on_ci_env(): if "GS_IMAGE" in os.environ and "GIE_MANAGER_IMAGE" in os.environ: return os.environ["GS_IMAGE"], os.environ["GIE_MANAGER_IMAGE"] else: return gs_config.k8s_gs_image, gs_config.k8s_gie_graph_manager_image @pytest.fixture def gs_session(): gs_image, gie_manager_image = get_gs_image_on_ci_env() sess = graphscope.session( num_workers=1, k8s_gs_image=gs_image, k8s_gie_graph_manager_image=gie_manager_image, k8s_coordinator_cpu=2, k8s_coordinator_mem="4Gi", k8s_vineyard_cpu=2, k8s_vineyard_mem="512Mi", k8s_engine_cpu=2, k8s_engine_mem="4Gi", k8s_etcd_cpu=2, k8s_etcd_mem="256Mi", vineyard_shared_mem="4Gi", k8s_volumes=get_k8s_volumes(), with_mars=True, # enable mars ) yield sess sess.close() def test_mars_session(gs_session): from mars import dataframe as md from mars import tensor as mt from mars.session import new_session ep = gs_session.engine_config["mars_endpoint"] mars_session = new_session(ep).as_default() tensor = mt.ones((4, 5, 6)) mt.to_vineyard(tensor).execute(session=mars_session).fetch(session=mars_session)
[ "graphscope.set_option", "mars.tensor.ones", "mars.session.new_session", "mars.tensor.to_vineyard", "logging.getLogger" ]
[((1060, 1096), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (1081, 1096), False, 'import graphscope\n'), ((1106, 1137), 'logging.getLogger', 'logging.getLogger', (['"""graphscope"""'], {}), "('graphscope')\n", (1123, 1137), False, 'import logging\n'), ((2552, 2570), 'mars.tensor.ones', 'mt.ones', (['(4, 5, 6)'], {}), '((4, 5, 6))\n', (2559, 2570), True, 'from mars import tensor as mt\n'), ((2509, 2524), 'mars.session.new_session', 'new_session', (['ep'], {}), '(ep)\n', (2520, 2524), False, 'from mars.session import new_session\n'), ((2575, 2597), 'mars.tensor.to_vineyard', 'mt.to_vineyard', (['tensor'], {}), '(tensor)\n', (2589, 2597), True, 'from mars import tensor as mt\n')]
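The Mars test above shows how a client connects to the Mars scheduler that GraphScope deploys when a session is created with with_mars=True. A sketch of just that hand-off; the session arguments are reduced to the essential flag, and in practice the k8s image/resource settings from the fixture are still required.

import graphscope
from mars import tensor as mt
from mars.session import new_session

sess = graphscope.session(num_workers=1, with_mars=True)  # plus k8s image/resource kwargs in practice

ep = sess.engine_config["mars_endpoint"]
mars_session = new_session(ep).as_default()

t = mt.ones((4, 5, 6))
mt.to_vineyard(t).execute(session=mars_session).fetch(session=mars_session)

sess.close()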
import networkx.algorithms.tests.test_distance_measures
import pytest

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(networkx.algorithms.tests.test_distance_measures,
                        decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.tests.test_distance_measures import TestDistance
from networkx.generators.lattice import grid_2d_graph


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestDistance)
class TestDistance:
    def setup_method(self):
        # NB: graphscope.nx does not support grid_2d_graph(which use tuple as node)
        # we use a tricky way to replace it.
        H = cnlti(grid_2d_graph(4, 4), first_label=1, ordering="sorted")
        G = nx.Graph(H)
        self.G = G
[ "networkx.generators.lattice.grid_2d_graph", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((489, 534), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (512, 534), False, 'import pytest\n'), ((536, 576), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestDistance'], {}), '(TestDistance)\n', (562, 576), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((310, 355), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (333, 355), False, 'import pytest\n'), ((772, 791), 'networkx.generators.lattice.grid_2d_graph', 'grid_2d_graph', (['(4)', '(4)'], {}), '(4, 4)\n', (785, 791), False, 'from networkx.generators.lattice import grid_2d_graph\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is referred and derived from project NetworkX # # which has the following license: # # Copyright (C) 2004-2020, NetworkX Developers # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # All rights reserved. # # This file is part of NetworkX. # # NetworkX is distributed under a BSD license; see LICENSE.txt for more # information. # # fmt: off import pytest from networkx.classes.tests.test_function import \ TestCommonNeighbors as _TestCommonNeighbors from networkx.classes.tests.test_function import TestFunction as _TestFunction from networkx.testing import assert_edges_equal from networkx.testing import assert_nodes_equal from graphscope import nx # fmt: on @pytest.mark.usefixtures("graphscope_session") class TestFunction(_TestFunction): def setup_method(self): self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test") self.Gdegree = {0: 3, 1: 2, 2: 2, 3: 1, 4: 0} self.Gnodes = list(range(5)) self.Gedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] self.DG = nx.DiGraph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}) self.DGin_degree = {0: 1, 1: 2, 2: 2, 3: 1, 4: 0} self.DGout_degree = {0: 3, 1: 3, 2: 0, 3: 0, 4: 0} self.DGnodes = list(range(5)) self.DGedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] class TestCommonNeighbors(_TestCommonNeighbors): @classmethod def setup_class(cls): cls.func = staticmethod(nx.common_neighbors) def test_func(G, u, v, expected): result = sorted(cls.func(G, u, v)) assert result == expected cls.test = staticmethod(test_func) def test_K5(self): G = nx.complete_graph(5) self.test(G, 0, 1, [2, 3, 4]) def test_P3(self): G = nx.path_graph(3) self.test(G, 0, 2, [1]) def test_S4(self): G = nx.star_graph(4) self.test(G, 1, 2, [0]) def test_digraph(self): with pytest.raises(nx.NetworkXNotImplemented): G = nx.DiGraph() G.add_edges_from([(0, 1), (1, 2)]) self.func(G, 0, 2) def test_nonexistent_nodes(self): G = nx.complete_graph(5) pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 4) pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 4, 5) pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 6) def test_custom1(self): """Case of no common neighbors.""" G = nx.Graph() G.add_nodes_from([0, 1]) self.test(G, 0, 1, []) def test_custom2(self): """Case of equal nodes.""" G = nx.complete_graph(4) self.test(G, 0, 0, [1, 2, 3]) def test_is_empty(): graphs = [nx.Graph(), nx.DiGraph()] for G in graphs: assert nx.is_empty(G) G.add_nodes_from(range(5)) assert nx.is_empty(G) G.add_edges_from([(1, 2), (3, 4)]) assert not nx.is_empty(G) def test_selfloops(): graphs = [nx.Graph(), nx.DiGraph()] for graph in graphs: G = nx.complete_graph(3, create_using=graph) G.add_edge(0, 0) assert_nodes_equal(nx.nodes_with_selfloops(G), [0]) assert_edges_equal(nx.selfloop_edges(G), [(0, 0)]) assert_edges_equal(nx.selfloop_edges(G, data=True), [(0, 0, {})]) assert nx.number_of_selfloops(G) == 1 # test selfloop attr G.add_edge(1, 1, weight=2) assert_edges_equal( nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] ) assert_edges_equal( nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)] ) # test removing selfloops behavior vis-a-vis altering a dict while iterating G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G)) if G.is_multigraph(): G.add_edge(0, 0) pytest.raises( RuntimeError, G.remove_edges_from, nx.selfloop_edges(G, keys=True) ) G.add_edge(0, 0) pytest.raises( TypeError, G.remove_edges_from, nx.selfloop_edges(G, data=True) ) G.add_edge(0, 0) pytest.raises( RuntimeError, 
G.remove_edges_from, nx.selfloop_edges(G, data=True, keys=True), ) else: G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G, keys=True)) G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G, data=True)) G.add_edge(0, 0) G.remove_edges_from(nx.selfloop_edges(G, keys=True, data=True))
[ "graphscope.nx.DiGraph", "graphscope.nx.selfloop_edges", "graphscope.nx.nodes_with_selfloops", "graphscope.nx.is_empty", "graphscope.nx.path_graph", "graphscope.nx.number_of_selfloops", "graphscope.nx.complete_graph", "graphscope.nx.Graph", "graphscope.nx.star_graph", "pytest.raises", "pytest.mark.usefixtures" ]
[((740, 785), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (763, 785), False, 'import pytest\n'), ((866, 930), 'graphscope.nx.Graph', 'nx.Graph', (['{(0): [1, 2, 3], (1): [1, 2, 0], (4): []}'], {'name': '"""Test"""'}), "({(0): [1, 2, 3], (1): [1, 2, 0], (4): []}, name='Test')\n", (874, 930), False, 'from graphscope import nx\n'), ((1105, 1158), 'graphscope.nx.DiGraph', 'nx.DiGraph', (['{(0): [1, 2, 3], (1): [1, 2, 0], (4): []}'], {}), '({(0): [1, 2, 3], (1): [1, 2, 0], (4): []})\n', (1115, 1158), False, 'from graphscope import nx\n'), ((1735, 1755), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(5)'], {}), '(5)\n', (1752, 1755), False, 'from graphscope import nx\n'), ((1830, 1846), 'graphscope.nx.path_graph', 'nx.path_graph', (['(3)'], {}), '(3)\n', (1843, 1846), False, 'from graphscope import nx\n'), ((1915, 1931), 'graphscope.nx.star_graph', 'nx.star_graph', (['(4)'], {}), '(4)\n', (1928, 1931), False, 'from graphscope import nx\n'), ((2206, 2226), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(5)'], {}), '(5)\n', (2223, 2226), False, 'from graphscope import nx\n'), ((2235, 2296), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.common_neighbors', 'G', '(5)', '(4)'], {}), '(nx.NetworkXError, nx.common_neighbors, G, 5, 4)\n', (2248, 2296), False, 'import pytest\n'), ((2305, 2366), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.common_neighbors', 'G', '(4)', '(5)'], {}), '(nx.NetworkXError, nx.common_neighbors, G, 4, 5)\n', (2318, 2366), False, 'import pytest\n'), ((2375, 2436), 'pytest.raises', 'pytest.raises', (['nx.NetworkXError', 'nx.common_neighbors', 'G', '(5)', '(6)'], {}), '(nx.NetworkXError, nx.common_neighbors, G, 5, 6)\n', (2388, 2436), False, 'import pytest\n'), ((2521, 2531), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (2529, 2531), False, 'from graphscope import nx\n'), ((2672, 2692), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(4)'], {}), '(4)\n', (2689, 2692), False, 'from graphscope import nx\n'), ((2768, 2778), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (2776, 2778), False, 'from graphscope import nx\n'), ((2780, 2792), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2790, 2792), False, 'from graphscope import nx\n'), ((2830, 2844), 'graphscope.nx.is_empty', 'nx.is_empty', (['G'], {}), '(G)\n', (2841, 2844), False, 'from graphscope import nx\n'), ((2895, 2909), 'graphscope.nx.is_empty', 'nx.is_empty', (['G'], {}), '(G)\n', (2906, 2909), False, 'from graphscope import nx\n'), ((3025, 3035), 'graphscope.nx.Graph', 'nx.Graph', ([], {}), '()\n', (3033, 3035), False, 'from graphscope import nx\n'), ((3037, 3049), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (3047, 3049), False, 'from graphscope import nx\n'), ((3088, 3128), 'graphscope.nx.complete_graph', 'nx.complete_graph', (['(3)'], {'create_using': 'graph'}), '(3, create_using=graph)\n', (3105, 3128), False, 'from graphscope import nx\n'), ((2006, 2046), 'pytest.raises', 'pytest.raises', (['nx.NetworkXNotImplemented'], {}), '(nx.NetworkXNotImplemented)\n', (2019, 2046), False, 'import pytest\n'), ((2064, 2076), 'graphscope.nx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2074, 2076), False, 'from graphscope import nx\n'), ((2972, 2986), 'graphscope.nx.is_empty', 'nx.is_empty', (['G'], {}), '(G)\n', (2983, 2986), False, 'from graphscope import nx\n'), ((3181, 3207), 'graphscope.nx.nodes_with_selfloops', 'nx.nodes_with_selfloops', (['G'], {}), '(G)\n', (3204, 3207), False, 'from graphscope import nx\n'), ((3241, 3261), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {}), '(G)\n', (3258, 3261), False, 'from graphscope import nx\n'), ((3300, 3331), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (3317, 3331), False, 'from graphscope import nx\n'), ((3362, 3387), 'graphscope.nx.number_of_selfloops', 'nx.number_of_selfloops', (['G'], {}), '(G)\n', (3384, 3387), False, 'from graphscope import nx\n'), ((3497, 3528), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (3514, 3528), False, 'from graphscope import nx\n'), ((3616, 3651), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '"""weight"""'}), "(G, data='weight')\n", (3633, 3651), False, 'from graphscope import nx\n'), ((3827, 3847), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {}), '(G)\n', (3844, 3847), False, 'from graphscope import nx\n'), ((3986, 4017), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'keys': '(True)'}), '(G, keys=True)\n', (4003, 4017), False, 'from graphscope import nx\n'), ((4136, 4167), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (4153, 4167), False, 'from graphscope import nx\n'), ((4321, 4363), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)', 'keys': '(True)'}), '(G, data=True, keys=True)\n', (4338, 4363), False, 'from graphscope import nx\n'), ((4454, 4485), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'keys': '(True)'}), '(G, keys=True)\n', (4471, 4485), False, 'from graphscope import nx\n'), ((4548, 4579), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'data': '(True)'}), '(G, data=True)\n', (4565, 4579), False, 'from graphscope import nx\n'), ((4642, 4684), 'graphscope.nx.selfloop_edges', 'nx.selfloop_edges', (['G'], {'keys': '(True)', 'data': '(True)'}), '(G, keys=True, data=True)\n', (4659, 4684), False, 'from graphscope import nx\n')]
import networkx.algorithms.bipartite.tests.test_edgelist
import pytest

from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(networkx.algorithms.bipartite.tests.test_edgelist,
                        decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.bipartite.tests.test_edgelist import TestEdgelist


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestEdgelist)
class TestEdgelist():
    @classmethod
    def setup_class(cls):
        cls.G = nx.Graph(name="test")
        e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
        cls.G.add_edges_from(e)
        cls.G.add_nodes_from(['a', 'c', 'e'], bipartite=0)
        cls.G.add_nodes_from(['b', 'd', 'f'], bipartite=1)
        cls.G.add_node('g', bipartite=0)
        cls.DG = nx.DiGraph(cls.G)

    @pytest.mark.skip(reason="str(e) not same with networkx")
    def test_write_edgelist_3(self):
        pass

    @pytest.mark.skip(reason="str(e) not same with networkx")
    @pytest.mark.skip(reason="not support multigraph")
    def test_write_edgelist_4(self):
        pass

    @pytest.mark.skip(reason="not support multigraph")
    def test_edgelist_multigraph(self):
        pass
[ "graphscope.experimental.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.skip", "pytest.mark.usefixtures" ]
[((464, 509), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (487, 509), False, 'import pytest\n'), ((511, 551), 'graphscope.experimental.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestEdgelist'], {}), '(TestEdgelist)\n', (537, 551), False, 'from graphscope.experimental.nx.utils.compat import with_graphscope_nx_context\n'), ((972, 1028), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""str(e) not same with networkx"""'}), "(reason='str(e) not same with networkx')\n", (988, 1028), False, 'import pytest\n'), ((1085, 1141), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""str(e) not same with networkx"""'}), "(reason='str(e) not same with networkx')\n", (1101, 1141), False, 'import pytest\n'), ((1147, 1196), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1163, 1196), False, 'import pytest\n'), ((1253, 1302), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""not support multigraph"""'}), "(reason='not support multigraph')\n", (1269, 1302), False, 'import pytest\n'), ((338, 383), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (361, 383), False, 'import pytest\n')]
import pytest
from networkx.algorithms.centrality.tests.test_degree_centrality import \
    TestDegreeCentrality

from graphscope.nx.utils.compat import with_graphscope_nx_context


@pytest.mark.usefixtures("graphscope_session")
@with_graphscope_nx_context(TestDegreeCentrality)
class TestDegreeCentrality:
    @pytest.mark.skip(reason="FIXME(@weibin):context support select from empty graph")
    def test_small_graph_centrality(self):
        G = nx.empty_graph(create_using=nx.DiGraph)
        assert {} == nx.degree_centrality(G)
        assert {} == nx.out_degree_centrality(G)
        assert {} == nx.in_degree_centrality(G)

        G = nx.empty_graph(1, create_using=nx.DiGraph)
        assert {0: 1} == nx.degree_centrality(G)
        assert {0: 1} == nx.out_degree_centrality(G)
        assert {0: 1} == nx.in_degree_centrality(G)
[ "pytest.mark.skip", "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((183, 228), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (206, 228), False, 'import pytest\n'), ((230, 278), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestDegreeCentrality'], {}), '(TestDegreeCentrality)\n', (256, 278), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((312, 398), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""FIXME(@weibin):context support select from empty graph"""'}), "(reason=\n 'FIXME(@weibin):context support select from empty graph')\n", (328, 398), False, 'import pytest\n')]
import os

import pytest

import graphscope


@pytest.fixture(scope="session")
def graphscope_session():
    graphscope.set_option(show_log=True)
    graphscope.set_option(initializing_interactive_engine=False)
    sess = graphscope.session(cluster_type="hosts", num_workers=1)
    sess.as_default()
    yield sess
    sess.close()
[ "graphscope.set_option", "graphscope.session", "pytest.fixture" ]
[((47, 78), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (61, 78), False, 'import pytest\n'), ((109, 145), 'graphscope.set_option', 'graphscope.set_option', ([], {'show_log': '(True)'}), '(show_log=True)\n', (130, 145), False, 'import graphscope\n'), ((150, 210), 'graphscope.set_option', 'graphscope.set_option', ([], {'initializing_interactive_engine': '(False)'}), '(initializing_interactive_engine=False)\n', (171, 210), False, 'import graphscope\n'), ((223, 278), 'graphscope.session', 'graphscope.session', ([], {'cluster_type': '"""hosts"""', 'num_workers': '(1)'}), "(cluster_type='hosts', num_workers=1)\n", (241, 278), False, 'import graphscope\n')]
import networkx.algorithms.flow.tests.test_gomory_hu
import pytest

from graphscope.nx.utils.compat import import_as_graphscope_nx
from graphscope.nx.utils.compat import with_graphscope_nx_context

import_as_graphscope_nx(networkx.algorithms.flow.tests.test_gomory_hu,
                        decorators=pytest.mark.usefixtures("graphscope_session"))

from networkx.algorithms.flow.tests.test_gomory_hu import TestGomoryHuTree


@pytest.mark.usefixtures("graphscope_session")
@pytest.mark.slow
@with_graphscope_nx_context(TestGomoryHuTree)
class TestGomoryHuTree:
    pass
[ "graphscope.nx.utils.compat.with_graphscope_nx_context", "pytest.mark.usefixtures" ]
[((431, 476), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (454, 476), False, 'import pytest\n'), ((496, 540), 'graphscope.nx.utils.compat.with_graphscope_nx_context', 'with_graphscope_nx_context', (['TestGomoryHuTree'], {}), '(TestGomoryHuTree)\n', (522, 540), False, 'from graphscope.nx.utils.compat import with_graphscope_nx_context\n'), ((304, 349), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""graphscope_session"""'], {}), "('graphscope_session')\n", (327, 349), False, 'import pytest\n')]