index               int64          0      0
repo_id             stringlengths  26     205
file_path           stringlengths  51     246
content             stringlengths  8      433k
__index_level_0__   int64          0      10k
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/NeptuneGremlinClient.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazon.neptune.gremlin.driver.sigv4.ChainedSigV4PropertiesProvider; import com.amazonaws.neptune.auth.NeptuneNettyHttpSigV4Signer; import com.amazonaws.neptune.auth.NeptuneSigV4SignerException; import com.amazonaws.services.neptune.cluster.Cluster; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.cluster.ConnectionConfig; import com.amazonaws.services.neptune.propertygraph.io.SerializationConfig; import org.apache.tinkerpop.gremlin.driver.*; import org.apache.tinkerpop.gremlin.driver.Cluster.Builder; import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection; import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class NeptuneGremlinClient implements AutoCloseable { public static final int DEFAULT_BATCH_SIZE = 64; private static final Logger logger = LoggerFactory.getLogger(NeptuneGremlinClient.class); public static NeptuneGremlinClient create(Cluster cluster, SerializationConfig serializationConfig) { ConnectionConfig connectionConfig = cluster.connectionConfig(); ConcurrencyConfig concurrencyConfig = cluster.concurrencyConfig(); if (!connectionConfig.useSsl()){ logger.warn("SSL has been disabled"); } Builder builder = org.apache.tinkerpop.gremlin.driver.Cluster.build() .port(connectionConfig.port()) .enableSsl(connectionConfig.useSsl()) .maxWaitForConnection(10000); builder = serializationConfig.apply(builder); if (connectionConfig.useIamAuth()) { builder = configureIamSigning(builder, connectionConfig); } for (String endpoint : connectionConfig.endpoints()) { builder = builder.addContactPoint(endpoint); } int numberOfEndpoints = connectionConfig.endpoints().size(); return new NeptuneGremlinClient(concurrencyConfig.applyTo(builder, numberOfEndpoints).create()); } protected static Builder configureIamSigning (Builder builder, ConnectionConfig connectionConfig) { if (connectionConfig.isDirectConnection()) { builder = builder.handshakeInterceptor( r -> { try { NeptuneNettyHttpSigV4Signer sigV4Signer = new NeptuneNettyHttpSigV4Signer( new ChainedSigV4PropertiesProvider().getSigV4Properties().getServiceRegion(), connectionConfig.getCredentialsProvider()); sigV4Signer.signRequest(r); } catch (NeptuneSigV4SignerException e) { throw new RuntimeException("Exception occurred while signing the request", e); } return r; } ); } else { builder = builder // use the JAAS_ENTRY auth property to pass Host header info to the channelizer .authProperties(new AuthProperties().with(AuthProperties.Property.JAAS_ENTRY, connectionConfig.handshakeRequestConfig().value())) .channelizer(LBAwareSigV4WebSocketChannelizer.class); } return builder; } private final org.apache.tinkerpop.gremlin.driver.Cluster cluster; private 
NeptuneGremlinClient(org.apache.tinkerpop.gremlin.driver.Cluster cluster) { this.cluster = cluster; } public GraphTraversalSource newTraversalSource() { return AnonymousTraversalSource.traversal().withRemote(DriverRemoteConnection.using(cluster)); } public QueryClient queryClient() { return new QueryClient(cluster.connect()); } @Override public void close() throws Exception { if (cluster != null && !cluster.isClosed() && !cluster.isClosing()) { cluster.close(); } } public static class QueryClient implements AutoCloseable { private final Client client; QueryClient(Client client) { this.client = client; } public ResultSet submit(String gremlin, Long timeoutMillis) { if (timeoutMillis != null){ return client.submit(gremlin, RequestOptions.build().timeout(timeoutMillis).create()); } else { return client.submit(gremlin); } } @Override public void close() throws Exception { client.close(); } } }
__index_level_0__: 1,000
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/NamedQueries.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import java.util.*; public class NamedQueries { public static NamedQueries fromJson(JsonNode json) { String name = json.path("name").textValue(); if (json.has("query")){ String query = json.path("query").textValue(); return new NamedQueries(name, Collections.singletonList(query)); } else { ArrayNode queries = (ArrayNode) json.path("queries"); List<String> collection = new ArrayList<>(); for (JsonNode query : queries) { collection.add(query.textValue()); } return new NamedQueries(name, collection); } } private final String name; private final Collection<String> queries; public NamedQueries(String name, Collection<String> queries) { this.name = name; this.queries = queries; } public String name() { return name; } public Collection<String> queries() { return queries; } public void addTo(Collection<NamedQuery> namedQueries) { for (String query : queries) { namedQueries.add(new NamedQuery(name, query)); } } public ArrayNode toJson() { ArrayNode json = JsonNodeFactory.instance.arrayNode(); for (String query : queries) { json.add(query); } return json; } }
__index_level_0__: 1,001
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/EdgeLabelStrategy.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import org.apache.commons.lang3.ArrayUtils; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Element; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.*; public enum EdgeLabelStrategy implements LabelStrategy { edgeLabelsOnly { @Override public Collection<Label> getLabels(GraphTraversalSource g) { // Using dedup can cause MemoryLimitExceededException on large datasets, so do the dedup in the set GraphTraversal<Edge, String> traversal = g.E().label(); logger.info(GremlinQueryDebugger.queryAsString(traversal)); Set<Label> labels = new HashSet<>(); traversal.forEachRemaining(r -> labels.add(new Label(r))); return labels; } @Override public Label getLabelFor(Map<String, Object> input) { return new Label(input.get("~label").toString()); } @Override public Label getLabelFor(PGResult input) { return new Label(input.getLabel()); } @Override public String[] additionalColumns(String... columns) { return columns; } @Override public <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? extends Element, T> t) { return t; } }, edgeAndVertexLabels { @Override public Collection<Label> getLabels(GraphTraversalSource g) { // Using dedup can cause MemoryLimitExceededException on large datasets, so do the dedup in the set GraphTraversal<Edge, Map<String, Object>> traversal = g.E() .project("~fromLabels", "~label", "~toLabels") .by(outV().label().fold()) .by(label()) .by(inV().label().fold()); logger.info(GremlinQueryDebugger.queryAsString(traversal)); Set<Label> labels = new HashSet<>(); traversal.forEachRemaining(r -> { labels.add(getLabelFor(r)); }); return labels; } @Override public Label getLabelFor(Map<String, Object> input) { @SuppressWarnings("unchecked") Collection<String> fromLabels = (Collection<String>) input.get("~fromLabels"); String label = String.valueOf(input.get("~label")); @SuppressWarnings("unchecked") Collection<String> toLabels = (Collection<String>) input.get("~toLabels"); return new Label(label, fromLabels, toLabels); } @Override public Label getLabelFor(PGResult input) { Collection<String> fromLabels = input.getFromLabels(); String label = input.getLabel().get(0); Collection<String> toLabels = input.getToLabels(); return new Label(label, fromLabels, toLabels); } @Override public String[] additionalColumns(String... columns) { return ArrayUtils.addAll(columns, "~fromLabels", "~toLabels"); } @Override public <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? 
extends Element, T> t) { return t.by(outV().label().fold()).by(inV().label().fold()); } }; private static final Logger logger = LoggerFactory.getLogger(EdgeLabelStrategy.class); }
__index_level_0__: 1,002
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/SchemaSamplingSpecification.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.propertygraph.schema.CreateGraphSchemaCommand; import com.amazonaws.services.neptune.propertygraph.schema.CreateGraphSchemaFromSample; import com.amazonaws.services.neptune.propertygraph.schema.CreateGraphSchemaFromScan; import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.Collection; public class SchemaSamplingSpecification { private final boolean sample; private final long sampleSize; public SchemaSamplingSpecification(boolean sample, long sampleSize) { this.sample = sample; this.sampleSize = sampleSize; } public CreateGraphSchemaCommand createSchemaCommand(Collection<ExportSpecification> exportSpecifications, GraphTraversalSource g) { if (sample) { return new CreateGraphSchemaFromSample(exportSpecifications, g, sampleSize); } else { return new CreateGraphSchemaFromScan(exportSpecifications, g); } } }
__index_level_0__: 1,003
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/AllLabels.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Element; import java.util.*; public class AllLabels implements LabelsFilter { private final LabelStrategy labelStrategy; public AllLabels(LabelStrategy labelStrategy) { this.labelStrategy = labelStrategy; } @Override public GraphTraversal<? extends Element, ?> apply(GraphTraversal<? extends Element, ?> traversal, FeatureToggles featureToggles, GraphElementType graphElementType) { return traversal; } @Override public Collection<Label> getLabelsUsing(GraphClient<?> graphClient) { return graphClient.labels(labelStrategy); } @Override public String[] getPropertiesForLabels(GraphElementSchemas graphElementSchemas) { Set<String> properties = new HashSet<>(); Iterable<Label> labels = graphElementSchemas.labels(); for (Label label : labels) { LabelSchema labelSchema = graphElementSchemas.getSchemaFor(label); for (PropertySchema propertySchema : labelSchema.propertySchemas()) { properties.add(propertySchema.nameWithoutDataType()); } } return properties.toArray(new String[]{}); } @Override public Label getLabelFor(Map<String, Object> input) { return labelStrategy.getLabelFor(input); } @Override public Label getLabelFor(PGResult input) { return labelStrategy.getLabelFor(input); } @Override public String[] addAdditionalColumnNames(String... columns) { return labelStrategy.additionalColumns(columns); } @Override public <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? extends Element, T> t) { return labelStrategy.addAdditionalColumns(t); } @Override public LabelsFilter filterFor(Label label) { return new SpecifiedLabels(Collections.singletonList(label), labelStrategy); } @Override public LabelsFilter intersection(Collection<Label> labels) { return new SpecifiedLabels(labels, labelStrategy); } @Override public boolean isEmpty() { return false; } @Override public String description(String element) { return String.format("all %s", element); } @Override public Collection<LabelsFilter> split() { return Collections.singletonList(this); } }
__index_level_0__: 1,004
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/NodeLabelStrategy.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Element; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; public enum NodeLabelStrategy implements LabelStrategy { nodeLabelsOnly { @Override public Collection<Label> getLabels(GraphTraversalSource g) { // Using dedup can cause MemoryLimitExceededException on large datasets, so do the dedup in the set GraphTraversal<Vertex, String> traversal = g.V().label(); logger.info(GremlinQueryDebugger.queryAsString(traversal)); Set<Label> labels = new HashSet<>(); traversal.forEachRemaining(r -> labels.add(new Label(r))); return labels; } @Override public Label getLabelFor(Map<String, Object> input) { @SuppressWarnings("unchecked") List<String> labels = (List<String>) input.get("~label"); labels = Label.fixLabelsIssue(labels); return new Label(labels); } @Override public Label getLabelFor(PGResult input) { List<String> labels = input.getLabel(); labels = Label.fixLabelsIssue(labels); return new Label(labels); } @Override public String[] additionalColumns(String... columns) { return columns; } @Override public <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? extends Element, T> t) { return t; } }; private static final Logger logger = LoggerFactory.getLogger(NodeLabelStrategy.class); }
__index_level_0__: 1,005
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/ExportStats.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.propertygraph.io.Jsonizable; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import java.util.Collection; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; public class ExportStats implements Jsonizable<GraphSchema> { private long nodeCount = 0; private long edgeCount = 0; private final ConcurrentHashMap<Label, LabelStats> nodeStats = new ConcurrentHashMap<>(); private final ConcurrentHashMap<Label, LabelStats> edgeStats = new ConcurrentHashMap<>(); public void setNodeCount(long value) { nodeCount = value; } public void setEdgeCount(long value) { edgeCount = value; } public void incrementNodeStats(Label label) { nodeStats.computeIfAbsent(label, LabelStats::new).increment(); } public void incrementEdgeStats(Label label) { edgeStats.computeIfAbsent(label, LabelStats::new).increment(); } public String formatStats(GraphSchema graphSchema) { StringBuilder sb = new StringBuilder(); sb.append("Source:").append(System.lineSeparator()); sb.append(" Nodes: ").append(nodeCount).append(System.lineSeparator()); sb.append(" Edges: ").append(edgeCount).append(System.lineSeparator()); sb.append("Export:").append(System.lineSeparator()); sb.append(" Nodes: ").append(nodeStats.values().stream().map(LabelStats::count).reduce(0L, Long::sum)).append(System.lineSeparator()); sb.append(" Edges: ").append(edgeStats.values().stream().map(LabelStats::count).reduce(0L, Long::sum)).append(System.lineSeparator()); sb.append(" Properties: ").append(getNumberOfProperties(graphSchema)).append(System.lineSeparator()); sb.append("Details:").append(System.lineSeparator()); sb.append(" Nodes: ").append(System.lineSeparator()); GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes); for (Map.Entry<Label, LabelStats> entry : nodeStats.entrySet()) { Label label = entry.getKey(); LabelStats labelStats = entry.getValue(); LabelSchema labelSchema = nodeSchemas.getSchemaFor(label); sb.append(" ").append(labelStats.toString()).append(System.lineSeparator()); for (PropertySchemaStats stats : labelSchema.propertySchemaStats()) { sb.append(" |_ ").append(stats.toString()).append(System.lineSeparator()); } } sb.append(" Edges: ").append(System.lineSeparator()); GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges); for (Map.Entry<Label, LabelStats> entry : edgeStats.entrySet()) { Label label = entry.getKey(); LabelStats labelStats = entry.getValue(); LabelSchema labelSchema = edgeSchemas.getSchemaFor(label); sb.append(" ").append(labelStats.toString()).append(System.lineSeparator()); for (PropertySchemaStats stats : 
labelSchema.propertySchemaStats()) { sb.append(" |_ ").append(stats.toString()).append(System.lineSeparator()); } } return sb.toString(); } private Long getNumberOfProperties(GraphSchema graphSchema) { return graphSchema.graphElementSchemas().stream() .map(s -> s.labelSchemas().stream() .map(l -> l.propertySchemaStats().stream() .map(p -> (long) p.observationCount()).reduce(0L, Long::sum)) .reduce(0L, Long::sum)) .reduce(0L, Long::sum); } public void addTo(ObjectNode rootNode, GraphSchema graphSchema) { ObjectNode statsNode = JsonNodeFactory.instance.objectNode(); rootNode.set("stats", statsNode); statsNode.put("nodes", nodeStats.values().stream().map(LabelStats::count).reduce(0L, Long::sum)); statsNode.put("edges", edgeStats.values().stream().map(LabelStats::count).reduce(0L, Long::sum)); statsNode.put("properties", getNumberOfProperties(graphSchema)); ObjectNode detailsNode = JsonNodeFactory.instance.objectNode(); statsNode.set("details", detailsNode); ArrayNode nodesArrayNode = JsonNodeFactory.instance.arrayNode(); detailsNode.set("nodes", nodesArrayNode); GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes); for (Map.Entry<Label, LabelStats> entry : nodeStats.entrySet()) { Label label = entry.getKey(); LabelStats labelStats = entry.getValue(); LabelSchema labelSchema = nodeSchemas.getSchemaFor(label); ObjectNode nodeNode = JsonNodeFactory.instance.objectNode(); nodesArrayNode.add(nodeNode); nodeNode.put("description", label.fullyQualifiedLabel()); nodeNode.set("labels", arrayNodeFromList(label.labels())); nodeNode.put("count", labelStats.count()); ArrayNode propertiesArray = JsonNodeFactory.instance.arrayNode(); for (PropertySchemaStats stats : labelSchema.propertySchemaStats()) { PropertySchema propertySchema = labelSchema.getPropertySchema(stats.property()); ObjectNode propertyNode = JsonNodeFactory.instance.objectNode(); propertyNode.put("name", stats.property().toString()); propertyNode.put("count", stats.observationCount()); propertyNode.put("numberOfRecords", stats.numberValuesCount()); propertyNode.put("minCardinality", stats.minCardinality()); propertyNode.put("maxCardinality", stats.maxCardinality()); propertyNode.put("isNullable", propertySchema.isNullable()); ObjectNode dataTypesNode = JsonNodeFactory.instance.objectNode(); ArrayNode dataTypeCountsNode = JsonNodeFactory.instance.arrayNode(); for (Map.Entry<DataType, Integer> e : stats.dataTypeCounts().entrySet()) { ObjectNode n = JsonNodeFactory.instance.objectNode(); n.put(e.getKey().name(), e.getValue()); dataTypeCountsNode.add(n); } dataTypesNode.put("inferred", propertySchema.dataType().name()); dataTypesNode.set("counts", dataTypeCountsNode); propertyNode.set("dataTypes", dataTypesNode); propertiesArray.add(propertyNode); } nodeNode.set("properties", propertiesArray); } ArrayNode edgesArrayNode = JsonNodeFactory.instance.arrayNode(); detailsNode.set("edges", edgesArrayNode); GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges); for (Map.Entry<Label, LabelStats> entry : edgeStats.entrySet()) { Label label = entry.getKey(); LabelStats labelStats = entry.getValue(); LabelSchema labelSchema = edgeSchemas.getSchemaFor(label); ObjectNode edgeNode = JsonNodeFactory.instance.objectNode(); edgesArrayNode.add(edgeNode); edgeNode.put("description", label.fullyQualifiedLabel()); ObjectNode labelsNode = JsonNodeFactory.instance.objectNode(); if (label.hasFromLabels()) { labelsNode.set("from", arrayNodeFromList(label.fromLabels().labels())); } 
labelsNode.set("edge", arrayNodeFromList(label.labels())); if (label.hasToLabels()) { labelsNode.set("to", arrayNodeFromList(label.toLabels().labels())); } edgeNode.set("labels", labelsNode); edgeNode.put("count", labelStats.count()); ArrayNode propertiesArray = JsonNodeFactory.instance.arrayNode(); for (PropertySchemaStats stats : labelSchema.propertySchemaStats()) { PropertySchema propertySchema = labelSchema.getPropertySchema(stats.property()); ObjectNode propertyNode = JsonNodeFactory.instance.objectNode(); propertyNode.put("name", stats.property().toString()); propertyNode.put("count", stats.observationCount()); propertyNode.put("numberOfRecords", stats.numberValuesCount()); propertyNode.put("minCardinality", stats.minCardinality()); propertyNode.put("maxCardinality", stats.maxCardinality()); propertyNode.put("isNullable", propertySchema.isNullable()); ObjectNode dataTypesNode = JsonNodeFactory.instance.objectNode(); ArrayNode dataTypeCountsNode = JsonNodeFactory.instance.arrayNode(); for (Map.Entry<DataType, Integer> e : stats.dataTypeCounts().entrySet()) { ObjectNode n = JsonNodeFactory.instance.objectNode(); n.put(e.getKey().name(), e.getValue()); dataTypeCountsNode.add(n); } dataTypesNode.put("inferred", propertySchema.dataType().name()); dataTypesNode.set("counts", dataTypeCountsNode); propertyNode.set("dataTypes", dataTypesNode); propertiesArray.add(propertyNode); } edgeNode.set("properties", propertiesArray); } } private ArrayNode arrayNodeFromList(Collection<String> c) { ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(); for (String s : c) { arrayNode.add(s); } return arrayNode; } @Override public JsonNode toJson(GraphSchema o) { ObjectNode json = JsonNodeFactory.instance.objectNode(); addTo(json, o); return json; } private static class LabelStats { private final Label label; private final AtomicLong count = new AtomicLong(0); private LabelStats(Label label) { this.label = label; } public void increment() { count.incrementAndGet(); } public long count() { return count.get(); } public Label label() { return label; } @Override public String toString() { return String.format("%s: %s", label.fullyQualifiedLabel(), count.get()); } } }
__index_level_0__: 1,006
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/RangeFactory.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.atomic.AtomicLong; import static java.lang.Math.min; public class RangeFactory { private static final Logger logger = LoggerFactory.getLogger(RangeFactory.class); public static RangeFactory create(GraphClient<?> graphClient, LabelsFilter labelsFilter, GremlinFilters gremlinFilters, RangeConfig rangeConfig, ConcurrencyConfig concurrencyConfig) { String description = labelsFilter.description(String.format("%ss", graphClient.description())); logger.info("Calculating ranges for {}", description); long estimatedNumberOfItemsInGraph = graphClient.approxCount(labelsFilter, rangeConfig, gremlinFilters); int effectiveConcurrency = estimatedNumberOfItemsInGraph < 1000 ? 1 : concurrencyConfig.concurrency(); long rangeSize = concurrencyConfig.isUnboundedParallelExecution(rangeConfig) ? (estimatedNumberOfItemsInGraph / effectiveConcurrency) + 1: rangeConfig.rangeSize(); logger.info("Estimated number of {} to export: {}, Range size: {}, Effective concurrency: {}", description, estimatedNumberOfItemsInGraph, rangeSize, effectiveConcurrency); return new RangeFactory( rangeSize, rangeConfig.numberOfItemsToExport(), rangeConfig.numberOfItemsToSkip(), estimatedNumberOfItemsInGraph, effectiveConcurrency); } private final long rangeSize; private final boolean exportAll; private final int concurrency; private final long rangeUpperBound; private final AtomicLong currentEnd; private final long numberOfItemsToExport; private RangeFactory(long rangeSize, long limit, long skip, long estimatedNumberOfItemsInGraph, int concurrency) { this.rangeSize = rangeSize; this.exportAll = limit == Long.MAX_VALUE; this.concurrency = concurrency; if (exportAll){ this.rangeUpperBound = estimatedNumberOfItemsInGraph; this.numberOfItemsToExport = estimatedNumberOfItemsInGraph - skip; } else { this.rangeUpperBound = limit + skip; this.numberOfItemsToExport = limit; } this.currentEnd = new AtomicLong(skip); } public Range nextRange() { if (isExhausted()){ return new Range(-1, -1); } long proposedEnd = currentEnd.accumulateAndGet(rangeSize, (left, right) -> left + right); long start = min(proposedEnd - rangeSize, rangeUpperBound); long actualEnd = min(proposedEnd, rangeUpperBound); if ((proposedEnd >= rangeUpperBound) && exportAll){ actualEnd = -1; } return new Range(start, actualEnd); } public long numberOfItemsToExport() { return numberOfItemsToExport; } public boolean isExhausted() { long end = currentEnd.get(); return end == -1 || end >= rangeUpperBound; } public int concurrency() { return concurrency; } }
__index_level_0__: 1,007
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/Range.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Element; import static java.lang.Math.abs; public class Range { public static final Range ALL = new Range(0, -1); private final long start; private final long end; public Range(long start, long end) { this.start = start; this.end = end; } public GraphTraversal<? extends Element, ?> applyRange(GraphTraversal<? extends Element, ?> traversal) { if (isAll()) { return traversal; } else { return traversal.range(start, end); } } public long difference() { return end - start; } public boolean isEmpty() { return start == -1 && end == -1; } public boolean isAll(){ return start == 0 && end == -1; } @Override public String toString() { return "range(" + start + ", " + end + ")"; } public boolean sizeExceeds(long value) { if (isEmpty()){ return false; } if (isAll()){ return true; } return value < (end - start); } }
__index_level_0__: 1,008
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/Scope.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema; import com.amazonaws.services.neptune.propertygraph.schema.TokensOnly; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; public enum Scope { all { @Override public Collection<ExportSpecification> exportSpecifications(GraphSchema graphSchema, Collection<Label> nodeLabels, Collection<Label> edgeLabels, GremlinFilters gremlinFilters, TokensOnly tokensOnly, EdgeLabelStrategy edgeLabelStrategy, ExportStats stats, FeatureToggles featureToggles) { Collection<ExportSpecification> results = new ArrayList<>(); if (graphSchema.isEmpty()) { results.add(new ExportSpecification( GraphElementType.nodes, Scope.labelsFilter(nodeLabels, NodeLabelStrategy.nodeLabelsOnly), gremlinFilters, stats, tokensOnly.nodeTokensOnly(), featureToggles)); results.add(new ExportSpecification( GraphElementType.edges, Scope.labelsFilter(edgeLabels, edgeLabelStrategy), gremlinFilters, stats, tokensOnly.edgeTokensOnly(), featureToggles)); } else { if (graphSchema.hasNodeSchemas()) { LabelsFilter labelsFilter = Scope.labelsFilter(nodeLabels, NodeLabelStrategy.nodeLabelsOnly) .intersection(graphSchema.graphElementSchemasFor(GraphElementType.nodes).labels()); if (!labelsFilter.isEmpty()) { results.add(new ExportSpecification( GraphElementType.nodes, labelsFilter, gremlinFilters, stats, tokensOnly.nodeTokensOnly(), featureToggles)); } } if (graphSchema.hasEdgeSchemas()) { LabelsFilter labelsFilter = Scope.labelsFilter(edgeLabels, edgeLabelStrategy) .intersection(graphSchema.graphElementSchemasFor(GraphElementType.edges).labels()); if (!labelsFilter.isEmpty()) { results.add(new ExportSpecification( GraphElementType.edges, labelsFilter, gremlinFilters, stats, tokensOnly.edgeTokensOnly(), featureToggles)); } } } return results; } }, nodes { @Override public Collection<ExportSpecification> exportSpecifications(GraphSchema graphSchema, Collection<Label> nodeLabels, Collection<Label> edgeLabels, GremlinFilters gremlinFilters, TokensOnly tokensOnly, EdgeLabelStrategy edgeLabelStrategy, ExportStats stats, FeatureToggles featureToggles) { if (graphSchema.isEmpty()) { return Collections.singletonList( new ExportSpecification( GraphElementType.nodes, Scope.labelsFilter(nodeLabels, NodeLabelStrategy.nodeLabelsOnly), gremlinFilters, stats, tokensOnly.nodeTokensOnly(), featureToggles) ); } else if (graphSchema.hasNodeSchemas()) { LabelsFilter labelsFilter = Scope.labelsFilter(nodeLabels, NodeLabelStrategy.nodeLabelsOnly) .intersection(graphSchema.graphElementSchemasFor(GraphElementType.nodes).labels()); if (!labelsFilter.isEmpty()) { return Collections.singletonList( new ExportSpecification( GraphElementType.nodes, labelsFilter, 
gremlinFilters, stats, tokensOnly.nodeTokensOnly(), featureToggles) ); } else { return Collections.emptyList(); } } else { return Collections.emptyList(); } } }, edges { @Override public Collection<ExportSpecification> exportSpecifications(GraphSchema graphSchema, Collection<Label> nodeLabels, Collection<Label> edgeLabels, GremlinFilters gremlinFilters, TokensOnly tokensOnly, EdgeLabelStrategy edgeLabelStrategy, ExportStats stats, FeatureToggles featureToggles) { if (graphSchema.isEmpty()) { return Collections.singletonList( new ExportSpecification( GraphElementType.edges, Scope.labelsFilter(edgeLabels, edgeLabelStrategy), gremlinFilters, stats, tokensOnly.edgeTokensOnly(), featureToggles) ); } else if (graphSchema.hasEdgeSchemas()) { LabelsFilter labelsFilter = Scope.labelsFilter(edgeLabels, edgeLabelStrategy) .intersection(graphSchema.graphElementSchemasFor(GraphElementType.edges).labels()); if (!labelsFilter.isEmpty()) { return Collections.singletonList( new ExportSpecification( GraphElementType.edges, labelsFilter, gremlinFilters, stats, tokensOnly.edgeTokensOnly(), featureToggles) ); } else { return Collections.emptyList(); } } else { return Collections.emptyList(); } } }; private static LabelsFilter labelsFilter(Collection<Label> labels, LabelStrategy labelStrategy) { if (labels.isEmpty()) { return new AllLabels(labelStrategy); } return new SpecifiedLabels(labels, labelStrategy); } public abstract Collection<ExportSpecification> exportSpecifications( GraphSchema graphSchema, Collection<Label> nodeLabels, Collection<Label> edgeLabels, GremlinFilters gremlinFilters, TokensOnly tokensOnly, EdgeLabelStrategy edgeLabelStrategy, ExportStats stats, FeatureToggles featureToggles); }
__index_level_0__: 1,009
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/RangeConfig.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; public class RangeConfig { private final long rangeSize; private final long numberOfItemsToSkip; private final long numberOfItemsToExport; private final long approxNodeCount; private final long approxEdgeCount; public RangeConfig(long rangeSize, long numberOfItemsToSkip, long numberOfItemsToExport, long approxNodeCount, long approxEdgeCount) { this.rangeSize = rangeSize; this.numberOfItemsToSkip = numberOfItemsToSkip; this.numberOfItemsToExport = numberOfItemsToExport; this.approxNodeCount = approxNodeCount; this.approxEdgeCount = approxEdgeCount; } public long rangeSize() { return rangeSize; } public long numberOfItemsToSkip() { return numberOfItemsToSkip; } public long numberOfItemsToExport() { return numberOfItemsToExport; } public long approxNodeCount() { return approxNodeCount; } public long approxEdgeCount() { return approxEdgeCount; } }
__index_level_0__: 1,010
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/TokenPrefix.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; public class TokenPrefix { private final String prefix; public TokenPrefix() { this("~"); } public TokenPrefix(String prefix) { this.prefix = prefix; } public String format(String s) { return String.format("%s%s", prefix, s); } }
__index_level_0__: 1,011
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/LabelStrategy.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Element; import java.util.Collection; import java.util.Map; public interface LabelStrategy { Collection<Label> getLabels(GraphTraversalSource g); Label getLabelFor(Map<String, Object> input); Label getLabelFor(PGResult input); String[] additionalColumns(String... columns); <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? extends Element, T> t); }
__index_level_0__: 1,012
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/PropertyGraphTargetConfig.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.io.KinesisConfig; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.io.Target; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.amazonaws.services.neptune.propertygraph.schema.MasterLabelSchemas; import java.io.IOException; import java.nio.file.Path; import java.util.function.Supplier; public class PropertyGraphTargetConfig { private final Directories directories; private final KinesisConfig kinesisConfig; private final PrinterOptions printerOptions; private final boolean inferSchema; private final PropertyGraphExportFormat format; private final Target output; private final boolean mergeFiles; private final boolean perLabelDirectories; public PropertyGraphTargetConfig(Directories directories, KinesisConfig kinesisConfig, PrinterOptions printerOptions, PropertyGraphExportFormat format, Target output, boolean mergeFiles, boolean perLabelDirectories, boolean inferSchema) { this.directories = directories; this.kinesisConfig = kinesisConfig; this.printerOptions = printerOptions; this.inferSchema = inferSchema; this.format = format; this.output = output; this.mergeFiles = mergeFiles; this.perLabelDirectories = perLabelDirectories; } public Target output() { return output; } public PropertyGraphExportFormat format() { return format; } public boolean mergeFiles() { return mergeFiles; } public PropertyGraphPrinter createPrinterForQueries(String name, LabelSchema labelSchema) throws IOException { return createPrinterForQueries(() -> directories.createQueryResultsFilePath(labelSchema.label().labelsAsString(), name, format), labelSchema); } private PropertyGraphPrinter createPrinterForQueries(Supplier<Path> pathSupplier, LabelSchema labelSchema) throws IOException { OutputWriter outputWriter = output.createOutputWriter(pathSupplier, kinesisConfig); return createPrinter(labelSchema, outputWriter); } public PropertyGraphPrinter createPrinterForEdges(String name, LabelSchema labelSchema) throws IOException { return createPrinterForEdges(() -> directories.createEdgesFilePath(name, format, labelSchema.label(), perLabelDirectories), labelSchema); } private PropertyGraphPrinter createPrinterForEdges(Supplier<Path> pathSupplier, LabelSchema labelSchema) throws IOException { OutputWriter outputWriter = output.createOutputWriter(pathSupplier, kinesisConfig); return createPrinter(labelSchema, outputWriter); } public PropertyGraphPrinter createPrinterForNodes(String name, LabelSchema labelSchema) throws IOException { return createPrinterForNodes(() -> directories.createNodesFilePath(name, format, labelSchema.label(), perLabelDirectories), labelSchema); } private PropertyGraphPrinter createPrinterForNodes(Supplier<Path> 
pathSupplier, LabelSchema labelSchema) throws IOException { OutputWriter outputWriter = output.createOutputWriter(pathSupplier, kinesisConfig); return createPrinter(labelSchema, outputWriter); } public PropertyGraphTargetConfig forFileConsolidation() { return new PropertyGraphTargetConfig(directories, kinesisConfig, printerOptions, format, output, mergeFiles, perLabelDirectories, false); } private PropertyGraphPrinter createPrinter(LabelSchema labelSchema, OutputWriter outputWriter) throws IOException { if (inferSchema) { return format.createPrinterForInferredSchema(outputWriter, labelSchema, printerOptions); } else { return format.createPrinter(outputWriter, labelSchema, printerOptions); } } public RewriteCommand createRewriteCommand(ConcurrencyConfig concurrencyConfig, FeatureToggles featureToggles) { if (output.isFileBased()) { return format.createRewriteCommand(this, concurrencyConfig, inferSchema, featureToggles); } else { return masterLabelSchemas -> masterLabelSchemas; } } public long freeSpaceInGigabytes(){ return directories.freeSpaceInGigabytes(); } }
__index_level_0__: 1,013
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/NodesWriterFactory.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import java.io.IOException; public class NodesWriterFactory implements WriterFactory<PGResult> { @Override public PropertyGraphPrinter createPrinter(String name, LabelSchema labelSchema, PropertyGraphTargetConfig targetConfig) throws IOException { PropertyGraphPrinter propertyGraphPrinter = targetConfig.createPrinterForNodes(name, labelSchema); propertyGraphPrinter.printHeaderMandatoryColumns("id", "label"); propertyGraphPrinter.printHeaderRemainingColumns(labelSchema.propertySchemas()); return propertyGraphPrinter; } @Override public LabelWriter<PGResult> createLabelWriter(PropertyGraphPrinter propertyGraphPrinter, Label label) { return new NodeWriter(propertyGraphPrinter); } }
__index_level_0__: 1,014
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/LabelWriters.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; public class LabelWriters<T extends PGResult> implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(LabelWriters.class); private final int maxFileDescriptorCount; private final AtomicInteger fileDescriptorCount; private final LinkedHashMap<Label, LabelWriter<T>> labelWriters = new LinkedHashMap<>(16, 0.75f, true); public LabelWriters(AtomicInteger fileDescriptorCount, int maxFileDescriptorCount) { this.fileDescriptorCount = fileDescriptorCount; this.maxFileDescriptorCount = maxFileDescriptorCount; } public boolean containsKey(Label label){ return labelWriters.containsKey(label); } public void put(Label label, LabelWriter<T> labelWriter) throws Exception { if (fileDescriptorCount.get() > maxFileDescriptorCount && labelWriters.size() > 1){ Label leastRecentlyAccessedLabel = labelWriters.keySet().iterator().next(); LabelWriter<T> leastRecentlyAccessedLabelWriter = labelWriters.remove(leastRecentlyAccessedLabel); logger.info("Closing writer for label {} for output {} so as to conserve file descriptors", leastRecentlyAccessedLabel.labelsAsString(), leastRecentlyAccessedLabelWriter.outputId()); leastRecentlyAccessedLabelWriter.close(); fileDescriptorCount.decrementAndGet(); } logger.debug("Adding writer for label {} for output {}", label.labelsAsString(), labelWriter.outputId()); labelWriters.put(label, labelWriter); fileDescriptorCount.incrementAndGet(); } @Override public void close() throws Exception { for (LabelWriter<T> writer : labelWriters.values()) { logger.info("Closing file: {}", writer.outputId()); writer.close(); fileDescriptorCount.decrementAndGet(); } } public LabelWriter<T> get(Label label) { return labelWriters.get(label); } }
__index_level_0__: 1,015
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/GraphElementHandler.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import java.io.IOException; public interface GraphElementHandler<T> extends AutoCloseable { void handle(T element, boolean allowTokens) throws IOException; }
__index_level_0__: 1,016
index: 0
repo_id: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
file_path: Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/ExportPropertyGraphJob.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.io.Status; import com.amazonaws.services.neptune.io.StatusOutputFormat; import com.amazonaws.services.neptune.propertygraph.GremlinFilters; import com.amazonaws.services.neptune.propertygraph.RangeConfig; import com.amazonaws.services.neptune.propertygraph.RangeFactory; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.Timer; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; public class ExportPropertyGraphJob { private static final Logger logger = LoggerFactory.getLogger(ExportPropertyGraphJob.class); private final Collection<ExportSpecification> exportSpecifications; private final GraphSchema graphSchema; private final GraphTraversalSource g; private final RangeConfig rangeConfig; private final GremlinFilters gremlinFilters; private final ConcurrencyConfig concurrencyConfig; private final PropertyGraphTargetConfig targetConfig; private final FeatureToggles featureToggles; private final int maxFileDescriptorCount; public ExportPropertyGraphJob(Collection<ExportSpecification> exportSpecifications, GraphSchema graphSchema, GraphTraversalSource g, RangeConfig rangeConfig, GremlinFilters gremlinFilters, ConcurrencyConfig concurrencyConfig, PropertyGraphTargetConfig targetConfig, FeatureToggles featureToggles, int maxFileDescriptorCount) { this.exportSpecifications = exportSpecifications; this.graphSchema = graphSchema; this.g = g; this.rangeConfig = rangeConfig; this.gremlinFilters = gremlinFilters; this.concurrencyConfig = concurrencyConfig; this.targetConfig = targetConfig; this.featureToggles = featureToggles; this.maxFileDescriptorCount = maxFileDescriptorCount; } public GraphSchema execute() throws Exception { Map<GraphElementType, GraphElementSchemas> revisedGraphElementSchemas = new HashMap<>(); for (ExportSpecification exportSpecification : exportSpecifications) { MasterLabelSchemas masterLabelSchemas = Timer.timedActivity("exporting " + exportSpecification.description(), (CheckedActivity.Callable<MasterLabelSchemas>) () -> export(exportSpecification)); revisedGraphElementSchemas.put(masterLabelSchemas.graphElementType(), masterLabelSchemas.toGraphElementSchemas()); } return new GraphSchema(revisedGraphElementSchemas); } private 
MasterLabelSchemas export(ExportSpecification exportSpecification) throws Exception { Collection<FileSpecificLabelSchemas> fileSpecificLabelSchemas = new ArrayList<>(); AtomicInteger fileDescriptorCount = new AtomicInteger(); for (ExportSpecification labelSpecificExportSpecification : exportSpecification.splitByLabel()) { Collection<Future<FileSpecificLabelSchemas>> futures = new ArrayList<>(); RangeFactory rangeFactory = labelSpecificExportSpecification.createRangeFactory(g, rangeConfig, concurrencyConfig); Status status = new Status( StatusOutputFormat.Description, String.format("%s: %s total", labelSpecificExportSpecification.description(), rangeFactory.numberOfItemsToExport()), () -> String.format(" [%s GB free space]", targetConfig.freeSpaceInGigabytes())); String description = String.format("writing %s as %s to %s", labelSpecificExportSpecification.description(), targetConfig.format().description(), targetConfig.output().name()); System.err.println("Started " + description); AtomicInteger fileIndex = new AtomicInteger(); Timer.timedActivity(description, (CheckedActivity.Runnable) () -> { ExecutorService taskExecutor = Executors.newFixedThreadPool(rangeFactory.concurrency()); for (int index = 1; index <= rangeFactory.concurrency(); index++) { ExportPropertyGraphTask exportTask = labelSpecificExportSpecification.createExportTask( graphSchema, g, targetConfig, gremlinFilters, rangeFactory, status, fileIndex, fileDescriptorCount, maxFileDescriptorCount ); futures.add(taskExecutor.submit(exportTask)); } taskExecutor.shutdown(); try { if (!taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)) { logger.warn("Timeout expired with uncompleted tasks"); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } updateFileSpecificLabelSchemas(futures, fileSpecificLabelSchemas); }); } MasterLabelSchemas masterLabelSchemas = exportSpecification.createMasterLabelSchemas(fileSpecificLabelSchemas); RewriteCommand rewriteCommand = targetConfig.createRewriteCommand(concurrencyConfig, featureToggles); return rewriteCommand.execute(masterLabelSchemas); } private void updateFileSpecificLabelSchemas( Collection<Future<FileSpecificLabelSchemas>> futures, Collection<FileSpecificLabelSchemas> fileSpecificLabelSchemas) throws Exception { for (Future<FileSpecificLabelSchemas> future : futures) { if (future.isCancelled()) { throw new IllegalStateException("Unable to complete job because at least one task was cancelled"); } if (!future.isDone()) { throw new IllegalStateException("Unable to complete job because at least one task has not completed"); } fileSpecificLabelSchemas.add(future.get()); } } }
1,017
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/JsonResource.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.CommandWriter; import com.amazonaws.services.neptune.util.S3ObjectInfo; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.commons.lang.StringUtils; import java.io.*; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URI; import java.nio.file.Files; import static java.nio.charset.StandardCharsets.UTF_8; public class JsonResource<T extends Jsonizable<E>, E> { private final String title; private final URI resourcePath; private final Class<? extends Jsonizable> clazz; public JsonResource(String title, URI resourcePath, Class<? extends Jsonizable> clazz) { this.title = title; this.resourcePath = resourcePath; this.clazz = clazz; } public void save(Jsonizable<E> object, E param) throws IOException { if (resourcePath == null) { return; } if (resourcePath.getScheme() != null && (resourcePath.getScheme().equals("s3") || resourcePath.getScheme().equals("https"))) { return; } File resourceFile = new File(resourcePath); try (Writer writer = new BufferedWriter(new OutputStreamWriter(Files.newOutputStream(resourceFile.toPath()), UTF_8))) { ObjectWriter objectWriter = new ObjectMapper().writer().withDefaultPrettyPrinter(); String json = objectWriter.writeValueAsString(object.toJson(param)); writer.write(json); } } public T get() throws IOException { if (resourcePath == null) { throw new IllegalStateException("Resource path is null"); } JsonNode json = readJson(); try { Method method = clazz.getMethod("fromJson", JsonNode.class); Object o = method.invoke(null, json); @SuppressWarnings("unchecked") T returnValue = (T) o; return returnValue; } catch (NoSuchMethodException e) { throw new RuntimeException("Jsonizable object must have a static fromJson(JsonNode) method"); } catch (IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(e); } } public void writeResourcePathAsMessage(CommandWriter writer) { if (resourcePath == null) { return; } writer.writeMessage(title + " : " + resourcePath.toString()); } private JsonNode readJson() throws IOException { String scheme = StringUtils.isNotEmpty(resourcePath.getScheme()) ? resourcePath.getScheme() : "file"; switch (scheme) { case "https": return getFromHttps(); case "s3": return getFromS3(); default: return getFromFile(); } } private JsonNode getFromFile() throws IOException { String pathname = resourcePath.toString(); File resourceFile = pathname.startsWith("file://") ? 
new File(pathname.substring(7)) : new File(pathname); if (!resourceFile.exists()) { throw new IllegalStateException(String.format("%s does not exist", resourceFile)); } if (resourceFile.isDirectory()) { throw new IllegalStateException(String.format("Expected a file, but found a directory: %s", resourceFile)); } return new ObjectMapper().readTree(resourceFile); } private JsonNode getFromS3() throws IOException { S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(resourcePath.toString()); AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient(); try (InputStream stream = s3.getObject(s3ObjectInfo.bucket(), s3ObjectInfo.key()).getObjectContent()){ return new ObjectMapper().readTree(stream); } } private JsonNode getFromHttps() throws IOException { return new ObjectMapper().readTree(resourcePath.toURL()); } }
1,018
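A minimal usage sketch for JsonResource above, saving and reloading a value through a local file URI. RunInfo, JsonResourceExample, and the /tmp path are hypothetical and exist only for this illustration; the static fromJson(JsonNode) method is included because JsonResource.get() locates it reflectively.

import java.io.IOException;
import java.net.URI;

import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.propertygraph.io.Jsonizable;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonResourceExample {

    // Hypothetical Jsonizable implementation used only for this sketch. The static
    // fromJson(JsonNode) method is required because JsonResource.get() looks it up reflectively.
    public static class RunInfo implements Jsonizable<Boolean> {

        private final String name;

        public RunInfo(String name) {
            this.name = name;
        }

        @Override
        public JsonNode toJson(Boolean includeExtras) {
            return new ObjectMapper().createObjectNode().put("name", name);
        }

        public static RunInfo fromJson(JsonNode json) {
            return new RunInfo(json.path("name").asText());
        }
    }

    public static void main(String[] args) throws IOException {
        URI resourcePath = URI.create("file:///tmp/run-info.json"); // hypothetical local path

        JsonResource<RunInfo, Boolean> resource =
                new JsonResource<>("Run info", resourcePath, RunInfo.class);

        resource.save(new RunInfo("example-export"), Boolean.FALSE); // pretty-printed JSON on disk
        RunInfo loaded = resource.get();                             // read back via static fromJson
        System.out.println(loaded.toJson(Boolean.FALSE));
    }
}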
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/RewriteAndMergeCsv.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.export.FeatureToggle; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.Timer; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVRecord; import org.apache.commons.lang3.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.Reader; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; public class RewriteAndMergeCsv implements RewriteCommand { private static final Logger logger = LoggerFactory.getLogger(RewriteAndMergeCsv.class); private final PropertyGraphTargetConfig targetConfig; private final ConcurrencyConfig concurrencyConfig; private final FeatureToggles featureToggles; public RewriteAndMergeCsv(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, FeatureToggles featureToggles) { this.targetConfig = targetConfig; this.concurrencyConfig = concurrencyConfig; this.featureToggles = featureToggles; } @Override public MasterLabelSchemas execute(MasterLabelSchemas masterLabelSchemas) throws Exception { GraphElementType graphElementType = masterLabelSchemas.graphElementType(); System.err.println(String.format("Rewriting and merging %s files...", graphElementType.name())); return Timer.timedActivity(String.format("rewriting and merging %s files", graphElementType.name()), (CheckedActivity.Callable<MasterLabelSchemas>) () -> rewriteFiles(masterLabelSchemas, graphElementType, targetConfig)); } private MasterLabelSchemas rewriteFiles(MasterLabelSchemas masterLabelSchemas, GraphElementType graphElementType, PropertyGraphTargetConfig targetConfig) throws Exception { Map<Label, MasterLabelSchema> updatedSchemas = new HashMap<>(); Collection<Future<MasterLabelSchema>> futures = new ArrayList<>(); ExecutorService taskExecutor = Executors.newFixedThreadPool(concurrencyConfig.concurrency()); for (MasterLabelSchema masterLabelSchema : masterLabelSchemas.schemas()) { futures.add(taskExecutor.submit(() -> rewriteAndMerge(targetConfig, graphElementType, masterLabelSchema))); } taskExecutor.shutdown(); try { taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } for (Future<MasterLabelSchema> future : futures) { if (future.isCancelled()) { throw new IllegalStateException("Unable to complete rewrite because at least one task was cancelled"); } if 
(!future.isDone()) { throw new IllegalStateException("Unable to complete rewrite because at least one task has not completed"); } MasterLabelSchema masterLabelSchema = future.get(); updatedSchemas.put(masterLabelSchema.labelSchema().label(), masterLabelSchema); } return new MasterLabelSchemas(updatedSchemas, graphElementType); } private MasterLabelSchema rewriteAndMerge(PropertyGraphTargetConfig targetConfig, GraphElementType graphElementType, MasterLabelSchema masterLabelSchema) throws Exception { LabelSchema masterSchema = masterLabelSchema.labelSchema().createCopy(); masterSchema.initStats(); String targetFilename = Directories.fileName(String.format("%s.consolidated", masterSchema.label().fullyQualifiedLabel())); Collection<String> renamedFiles = new ArrayList<>(); try (PropertyGraphPrinter printer = graphElementType.writerFactory().createPrinter( targetFilename, masterSchema, targetConfig.forFileConsolidation())) { renamedFiles.add(printer.outputId()); for (FileSpecificLabelSchema fileSpecificLabelSchema : masterLabelSchema.fileSpecificLabelSchemas()) { try (DeletableFile file = new DeletableFile(new File(fileSpecificLabelSchema.outputId()))) { if (featureToggles.containsFeature(FeatureToggle.Keep_Rewritten_Files)){ file.doNotDelete(); } LabelSchema labelSchema = fileSpecificLabelSchema.labelSchema(); Label label = labelSchema.label(); String[] additionalElementHeaders = label.hasFromAndToLabels() ? new String[]{"~fromLabels", "~toLabels"} : new String[]{}; String[] filePropertyHeaders = labelSchema.propertySchemas().stream() .map(p -> p.property().toString()) .collect(Collectors.toList()) .toArray(new String[]{}); String[] fileHeaders = ArrayUtils.addAll( graphElementType.tokenNames().toArray(new String[]{}), ArrayUtils.addAll(additionalElementHeaders, filePropertyHeaders)); logger.info("File: {}, Headers: [{}]", fileSpecificLabelSchema.outputId(), fileHeaders); try (Reader in = file.reader()) { CSVFormat format = CSVFormat.RFC4180 .withSkipHeaderRecord(false) // files will not have headers .withHeader(fileHeaders); Iterable<CSVRecord> records = format.parse(in); for (CSVRecord record : records) { printer.printStartRow(); if (graphElementType == GraphElementType.nodes) { printer.printNode(record.get("~id"), Arrays.asList(record.get("~label").split(";"))); } else { if (label.hasFromAndToLabels()) { printer.printEdge( record.get("~id"), record.get("~label"), record.get("~from"), record.get("~to"), Arrays.asList(record.get("~fromLabels").split(";")), Arrays.asList(record.get("~toLabels").split(";"))); } else { printer.printEdge(record.get("~id"), record.get("~label"), record.get("~from"), record.get("~to")); } } printer.printProperties(record.toMap(), false); printer.printEndRow(); } } catch (Exception e) { logger.error("Error while rewriting file: {}", fileSpecificLabelSchema.outputId(), e); file.doNotDelete(); throw e; } } } } return new MasterLabelSchema( masterSchema, renamedFiles.stream().map(f -> new FileSpecificLabelSchema(f, targetConfig.format(), masterSchema)).collect(Collectors.toList())); } }
1,019
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/ExportPropertyGraphTask.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.Status; import com.amazonaws.services.neptune.propertygraph.*; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; public class ExportPropertyGraphTask implements Callable<FileSpecificLabelSchemas> { private static final Logger logger = LoggerFactory.getLogger(ExportPropertyGraphTask.class); private final GraphElementSchemas graphElementSchemas; private final LabelsFilter labelsFilter; private final GraphClient<? extends PGResult> graphClient; private final WriterFactory<? extends PGResult> writerFactory; private final PropertyGraphTargetConfig targetConfig; private final RangeFactory rangeFactory; private final GremlinFilters gremlinFilters; private final Status status; private final AtomicInteger index; private final LabelWriters<PGResult> labelWriters; public ExportPropertyGraphTask(GraphElementSchemas graphElementSchemas, LabelsFilter labelsFilter, GraphClient<? extends PGResult> graphClient, WriterFactory<? extends PGResult> writerFactory, PropertyGraphTargetConfig targetConfig, RangeFactory rangeFactory, GremlinFilters gremlinFilters, Status status, AtomicInteger index, AtomicInteger fileDescriptorCount, int maxFileDescriptorCount) { this.graphElementSchemas = graphElementSchemas; this.labelsFilter = labelsFilter; this.graphClient = graphClient; this.writerFactory = writerFactory; this.targetConfig = targetConfig; this.rangeFactory = rangeFactory; this.gremlinFilters = gremlinFilters; this.status = status; this.index = index; this.labelWriters = new LabelWriters<>(fileDescriptorCount, maxFileDescriptorCount); } @Override public FileSpecificLabelSchemas call() { FileSpecificLabelSchemas fileSpecificLabelSchemas = new FileSpecificLabelSchemas(); CountingHandler handler = new CountingHandler( new ExportPGTaskHandler( fileSpecificLabelSchemas, graphElementSchemas, targetConfig, writerFactory, labelWriters, graphClient, status, index, labelsFilter )); try { while (status.allowContinue()) { Range range = rangeFactory.nextRange(); if (range.isEmpty()) { status.halt(); } else { graphClient.queryForValues(handler, range, labelsFilter, gremlinFilters, graphElementSchemas); if (range.sizeExceeds(handler.numberProcessed()) || rangeFactory.isExhausted()) { status.halt(); } } } } finally { try { handler.close(); } catch (Exception e) { logger.error("Error while closing handler", e); } } return fileSpecificLabelSchemas; } }
1,020
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/JsonPrinterOptions.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.TokenPrefix; public class JsonPrinterOptions { public static Builder builder(){ return new Builder(); } private final boolean strictCardinality; private final TokenPrefix tokenPrefix; private JsonPrinterOptions(boolean strictCardinality, TokenPrefix tokenPrefix) { this.strictCardinality = strictCardinality; this.tokenPrefix = tokenPrefix; } public boolean strictCardinality() { return strictCardinality; } public TokenPrefix tokenPrefix() { return tokenPrefix; } public Builder copy(){ return new Builder() .setStrictCardinality(strictCardinality) .setTokenPrefix(tokenPrefix); } public static class Builder{ private boolean strictCardinality = false; private TokenPrefix tokenPrefix = new TokenPrefix(); public Builder setStrictCardinality(boolean strictCardinality) { this.strictCardinality = strictCardinality; return this; } public Builder setTokenPrefix(TokenPrefix tokenPrefix){ this.tokenPrefix = tokenPrefix; return this; } public JsonPrinterOptions build(){ return new JsonPrinterOptions(strictCardinality, tokenPrefix); } } }
1,021
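A small sketch of the JsonPrinterOptions builder and its copy() method shown above; JsonPrinterOptionsExample is a hypothetical class used only to demonstrate the calls.

import com.amazonaws.services.neptune.propertygraph.io.JsonPrinterOptions;

public class JsonPrinterOptionsExample {

    public static void main(String[] args) {
        // Strict cardinality on; the token prefix keeps its default value.
        JsonPrinterOptions options = JsonPrinterOptions.builder()
                .setStrictCardinality(true)
                .build();

        // copy() seeds a new Builder with the current values, so variants can be
        // derived without mutating the original options.
        JsonPrinterOptions relaxed = options.copy().setStrictCardinality(false).build();

        System.out.println(options.strictCardinality()); // true
        System.out.println(relaxed.strictCardinality()); // false
    }
}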
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/VariableRowCsvPropertyGraphPrinter.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; public class VariableRowCsvPropertyGraphPrinter implements PropertyGraphPrinter { private final CsvPropertyGraphPrinter csvPropertyGraphPrinter; private final OutputWriter writer; private final LabelSchema labelSchema; private boolean isNullable = false; public VariableRowCsvPropertyGraphPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) { CsvPrinterOptions csvPrinterOptions = CsvPrinterOptions.builder() .setMultiValueSeparator(printerOptions.csv().multiValueSeparator()) .build(); this.writer = writer; this.labelSchema = labelSchema; this.csvPropertyGraphPrinter = new CsvPropertyGraphPrinter( writer, labelSchema, new PrinterOptions(csvPrinterOptions), true); } @Override public String outputId() { return csvPropertyGraphPrinter.outputId(); } @Override public void printHeaderMandatoryColumns(String... columns) { // Do nothing } @Override public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) { // Do nothing } @Override public void printProperties(Map<?, ?> properties) { // Print known properties csvPropertyGraphPrinter.printProperties(properties); // Check to see whether known properties are present for (PropertySchema propertySchema : labelSchema.propertySchemas()) { if (!properties.containsKey(propertySchema.property())) { propertySchema.makeNullable(); } } // Print unknown properties for (Map.Entry<?, ?> property : properties.entrySet()) { Object key = property.getKey(); if (!labelSchema.containsProperty(key)) { Object value = property.getValue(); PropertySchema propertySchema = new PropertySchema(key); PropertySchema.PropertyValueMetadata propertyValueMetadata = propertySchema.accept(value, true); if (isNullable) { propertySchema.makeNullable(); } labelSchema.put(key, propertySchema); labelSchema.recordObservation(propertySchema, value, propertyValueMetadata); csvPropertyGraphPrinter.printProperty(propertySchema, value); } } isNullable = true; } @Override public void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException { printProperties(properties); } @Override public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException { printProperties(properties); } @Override public void printEdge(String id, String label, String from, String to) throws IOException { printEdge(id, label, from, to, null, null); } @Override public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException { csvPropertyGraphPrinter.printEdge(id, label, from, to, fromLabels, toLabels); } @Override public void 
printNode(String id, List<String> labels) { csvPropertyGraphPrinter.printNode(id, labels); } @Override public void printStartRow() { csvPropertyGraphPrinter.printStartRow(); } @Override public void printEndRow() { csvPropertyGraphPrinter.printEndRow(); } @Override public void close() throws Exception { writer.close(); } }
1,022
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/WriterFactory.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import java.io.IOException; public interface WriterFactory<T> { PropertyGraphPrinter createPrinter(String name, LabelSchema labelSchema, PropertyGraphTargetConfig targetConfig) throws IOException; LabelWriter<T> createLabelWriter(PropertyGraphPrinter propertyGraphPrinter, Label label); }
1,023
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/PropertyGraphExportFormat.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.io.FileExtension; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.util.MinimalPrettyPrinter; import org.apache.commons.io.FilenameUtils; import java.io.IOException; public enum PropertyGraphExportFormat implements FileExtension { json { @Override public String extension() { return "json"; } @Override PropertyGraphPrinter createPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { JsonGenerator generator = createJsonGenerator(writer, writer.lineSeparator()); return new JsonPropertyGraphPrinter(writer, generator, labelSchema, printerOptions); } @Override PropertyGraphPrinter createPrinterForInferredSchema(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { JsonGenerator generator = createJsonGenerator(writer, writer.lineSeparator()); return new JsonPropertyGraphPrinter(writer, generator, labelSchema, printerOptions, true); } @Override public String description() { return "JSON"; } @Override public RewriteCommand createRewriteCommand(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, boolean inferSchema, FeatureToggles featureToggles) { return RewriteCommand.NULL_COMMAND; } }, csv { @Override public String extension() { return "csv"; } @Override PropertyGraphPrinter createPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) { PrinterOptions newPrinterOptions = new PrinterOptions( printerOptions.csv().copy() .setIncludeHeaders(true) .build()); return new CsvPropertyGraphPrinter(writer, labelSchema, newPrinterOptions); } @Override PropertyGraphPrinter createPrinterForInferredSchema(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { return new VariableRowCsvPropertyGraphPrinter(writer, labelSchema, printerOptions); } @Override public String description() { return "CSV"; } @Override public RewriteCommand createRewriteCommand(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, boolean inferSchema, FeatureToggles featureToggles) { if (targetConfig.mergeFiles()) { return new RewriteAndMergeCsv(targetConfig, concurrencyConfig, featureToggles); } else { if (inferSchema) { return new RewriteCsv(targetConfig, concurrencyConfig, featureToggles); } else { return RewriteCommand.NULL_COMMAND; } } } }, csvNoHeaders { @Override public String extension() { return "csv"; } @Override PropertyGraphPrinter createPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) { PrinterOptions 
newPrinterOptions = new PrinterOptions( printerOptions.csv().copy() .setIncludeHeaders(false) .build()); return new CsvPropertyGraphPrinter(writer, labelSchema, newPrinterOptions); } @Override PropertyGraphPrinter createPrinterForInferredSchema(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { return new VariableRowCsvPropertyGraphPrinter(writer, labelSchema, printerOptions); } @Override public String description() { return "CSV (no headers)"; } @Override public RewriteCommand createRewriteCommand(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, boolean inferSchema, FeatureToggles featureToggles) { if (targetConfig.mergeFiles()) { return new RewriteAndMergeCsv(targetConfig, concurrencyConfig, featureToggles); } else { if (inferSchema) { return new RewriteCsv(targetConfig, concurrencyConfig, featureToggles); } else { return RewriteCommand.NULL_COMMAND; } } } }, neptuneStreamsJson { @Override public String extension() { return "json"; } @Override PropertyGraphPrinter createPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { JsonGenerator generator = createJsonGenerator(writer, ""); return new NeptuneStreamsJsonPropertyGraphPrinter(writer, generator); } @Override PropertyGraphPrinter createPrinterForInferredSchema(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { return createPrinter(writer, labelSchema, printerOptions); } @Override public String description() { return "JSON (Neptune Streams format)"; } @Override public RewriteCommand createRewriteCommand(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, boolean inferSchema, FeatureToggles featureToggles) { return RewriteCommand.NULL_COMMAND; } }, neptuneStreamsSimpleJson { @Override public String extension() { return "json"; } @Override PropertyGraphPrinter createPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { JsonGenerator generator = createJsonGenerator(writer, ""); return new NeptuneStreamsSimpleJsonPropertyGraphPrinter(writer, generator); } @Override PropertyGraphPrinter createPrinterForInferredSchema(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { return createPrinter(writer, labelSchema, printerOptions); } @Override public String description() { return "JSON (Neptune Streams simple format)"; } @Override public RewriteCommand createRewriteCommand(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, boolean inferSchema, FeatureToggles featureToggles) { return RewriteCommand.NULL_COMMAND; } }; private static JsonGenerator createJsonGenerator(OutputWriter writer, String s) throws IOException { JsonGenerator generator = new JsonFactory().createGenerator(writer.writer()); generator.setPrettyPrinter(new MinimalPrettyPrinter(s)); generator.disable(JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM); return generator; } abstract PropertyGraphPrinter createPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException; abstract PropertyGraphPrinter createPrinterForInferredSchema(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException; public abstract String description(); public abstract RewriteCommand createRewriteCommand(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, boolean inferSchema, FeatureToggles 
featureToggles); public String replaceExtension(String filename, String replacement){ return String.format("%s.%s", FilenameUtils.removeExtension(filename), replacement); } }
1,024
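A brief sketch of the public surface of the PropertyGraphExportFormat enum above (description, extension, and replaceExtension); ExportFormatExample and the Person-1.json filename are hypothetical.

import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat;

public class ExportFormatExample {

    public static void main(String[] args) {
        PropertyGraphExportFormat format = PropertyGraphExportFormat.csv;

        System.out.println(format.description()); // CSV
        System.out.println(format.extension());   // csv

        // replaceExtension() swaps whatever extension a filename already has.
        System.out.println(format.replaceExtension("Person-1.json", format.extension())); // Person-1.csv
    }
}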
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/CommaPrinter.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.OutputWriter; class CommaPrinter { private final OutputWriter outputWriter; private boolean printComma = false; CommaPrinter(OutputWriter outputWriter) { this.outputWriter = outputWriter; } void printComma() { if (printComma) { outputWriter.print(","); } else { printComma = true; } } void init() { printComma = false; } }
1,025
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/CsvPropertyGraphPrinter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.propertygraph.TokenPrefix; import com.amazonaws.services.neptune.propertygraph.schema.DataType; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import com.amazonaws.services.neptune.util.SemicolonUtils; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; public class CsvPropertyGraphPrinter implements PropertyGraphPrinter { private final OutputWriter writer; private final LabelSchema labelSchema; private final CsvPrinterOptions printerOptions; private final boolean allowUpdateSchema; private final CommaPrinter commaPrinter; public CsvPropertyGraphPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions) { this(writer, labelSchema, printerOptions, false); } public CsvPropertyGraphPrinter(OutputWriter writer, LabelSchema labelSchema, PrinterOptions printerOptions, boolean allowUpdateSchema) { this.writer = writer; this.labelSchema = labelSchema; this.printerOptions = printerOptions.csv(); this.commaPrinter = new CommaPrinter(writer); this.allowUpdateSchema = allowUpdateSchema; } @Override public String outputId() { return writer.outputId(); } @Override public void printHeaderMandatoryColumns(String... 
columns) { if (printerOptions.includeHeaders() && writer.isNewTarget()) { TokenPrefix tokenPrefix = printerOptions.tokenPrefix(); for (String column : columns) { commaPrinter.printComma(); writer.print(tokenPrefix.format(column)); } } } @Override public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) { if (printerOptions.includeHeaders() && writer.isNewTarget()) { for (PropertySchema property : remainingColumns) { commaPrinter.printComma(); if (printerOptions.includeTypeDefinitions()) { writer.print(property.nameWithDataType(printerOptions.escapeCsvHeaders())); } else { writer.print(property.nameWithoutDataType(printerOptions.escapeCsvHeaders())); } } writer.print(writer.lineSeparator()); } } @Override public void printProperties(Map<?, ?> properties) { printProperties(properties, true); } @Override public void printProperties(Map<?, ?> properties, boolean applyFormatting) { for (PropertySchema propertySchema : labelSchema.propertySchemas()) { Object property = propertySchema.property(); if (properties.containsKey(property)) { Object value = properties.get(property); PropertySchema.PropertyValueMetadata propertyValueMetadata = propertySchema.accept(value, allowUpdateSchema); labelSchema.recordObservation(propertySchema, value, propertyValueMetadata); printProperty(propertySchema, value, applyFormatting); } else { commaPrinter.printComma(); } } } public void printProperty(PropertySchema schema, Object value) { printProperty(schema, value, true); } private void printProperty(PropertySchema schema, Object value, boolean applyFormatting) { DataType dataType = schema.dataType(); commaPrinter.printComma(); if (applyFormatting) { String formattedValue = isList(value) ? formatList(value, dataType, printerOptions) : dataType.format(value, printerOptions.escapeNewline()); writer.print(formattedValue); } else { if (dataType == DataType.String) { if (isSingleValueColumnWithSemicolonSeparator(schema)) { writer.print(DataType.String.format(SemicolonUtils.unescape(value.toString()), printerOptions.escapeNewline())); } else { writer.print(DataType.String.format(value, printerOptions.escapeNewline())); } } else { writer.print(String.valueOf(value)); } } } private boolean isSingleValueColumnWithSemicolonSeparator(PropertySchema schema) { return !schema.isMultiValue() && printerOptions.isSemicolonSeparator(); } @Override public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException { printProperties(properties); } @Override public void printEdge(String id, String label, String from, String to) throws IOException { printEdge(id, label, from, to, null, null); } @Override public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException { commaPrinter.printComma(); writer.print(DataType.String.format(id, printerOptions.escapeNewline())); commaPrinter.printComma(); writer.print(DataType.String.format(label, printerOptions.escapeNewline())); commaPrinter.printComma(); writer.print(DataType.String.format(from, printerOptions.escapeNewline())); commaPrinter.printComma(); writer.print(DataType.String.format(to, printerOptions.escapeNewline())); if (fromLabels != null) { commaPrinter.printComma(); writer.print(DataType.String.formatList(fromLabels, printerOptions)); } if (toLabels != null) { commaPrinter.printComma(); writer.print(DataType.String.formatList(toLabels, printerOptions)); } } @Override public void printNode(String id, List<String> labels) { 
commaPrinter.printComma(); writer.print(DataType.String.format(id, printerOptions.escapeNewline())); commaPrinter.printComma(); writer.print(DataType.String.formatList(labels, printerOptions)); } @Override public void printStartRow() { writer.startCommit(); commaPrinter.init(); } @Override public void printEndRow() { writer.print(writer.lineSeparator()); writer.endCommit(); } private String formatList(Object value, DataType dataType, CsvPrinterOptions options) { List<?> values = (List<?>) value; return dataType.formatList(values, options); } private boolean isList(Object value) { return value instanceof List<?>; } @Override public void close() throws Exception { writer.close(); } }
1,026
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/RenameableFiles.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; class RenameableFiles { private final Map<File, String> entries = new HashMap<>(); public void add(File file, String filename) { entries.put(file, filename); } public Collection<File> rename() { Collection<File> renamedFiles = new ArrayList<>(); for (Map.Entry<File, String> entry : entries.entrySet()) { File file = entry.getKey(); File renamedFile = new File(file.getParentFile(), entry.getValue()); boolean renamed = file.renameTo(renamedFile); if (!renamed) { throw new IllegalStateException("Unable to rename file: " + file.getAbsolutePath()); } renamedFiles.add(renamedFile); } return renamedFiles; } }
1,027
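A short usage sketch for RenameableFiles above. Because the class is package-private, the sketch assumes it is compiled into the same package; the /tmp/export paths and final filenames are hypothetical, and the .tmp files would need to exist for the renames to succeed.

package com.amazonaws.services.neptune.propertygraph.io;

import java.io.File;
import java.util.Collection;

public class RenameableFilesExample {

    public static void main(String[] args) {
        RenameableFiles renameableFiles = new RenameableFiles();

        // Register temporary output files against their final names (hypothetical paths).
        renameableFiles.add(new File("/tmp/export/nodes-1.csv.tmp"), "nodes-1.csv");
        renameableFiles.add(new File("/tmp/export/edges-1.csv.tmp"), "edges-1.csv");

        // rename() performs the renames in place and returns the new File handles.
        Collection<File> renamed = renameableFiles.rename();
        renamed.forEach(f -> System.out.println(f.getAbsolutePath()));
    }
}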
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/Jsonizable.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.fasterxml.jackson.databind.JsonNode; public interface Jsonizable<T> { JsonNode toJson(T o); }
1,028
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/RewriteCommand.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.schema.MasterLabelSchemas; public interface RewriteCommand { RewriteCommand NULL_COMMAND = masterLabelSchemas -> masterLabelSchemas; MasterLabelSchemas execute(MasterLabelSchemas masterLabelSchemas) throws Exception; }
1,029
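A minimal sketch showing that RewriteCommand above is a functional interface: a pass-through lambda that mirrors NULL_COMMAND, which ExportPropertyGraphJob invokes after each export via execute(masterLabelSchemas). RewriteCommandExample is hypothetical.

import com.amazonaws.services.neptune.propertygraph.io.RewriteCommand;

public class RewriteCommandExample {

    public static void main(String[] args) {
        // A pass-through command with logging; ExportPropertyGraphJob would call
        // execute(masterLabelSchemas) on it after exporting each element type.
        RewriteCommand loggingCommand = masterLabelSchemas -> {
            System.out.println("Post-processing " + masterLabelSchemas.graphElementType().name() + " files");
            return masterLabelSchemas;
        };

        // Formats that need no rewrite pass (for example the plain JSON format) return
        // the built-in no-op instead of a custom command.
        RewriteCommand noOp = RewriteCommand.NULL_COMMAND;

        System.out.println(loggingCommand != noOp); // true
    }
}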
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/DeletableFile.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.Reader; class DeletableFile implements AutoCloseable { private final File file; private boolean allowDelete = true; DeletableFile(File file) { this.file = file; } public Reader reader() throws FileNotFoundException { return new FileReader(file); } public String name() { return file.getName(); } public void doNotDelete(){ allowDelete = false; } @Override public void close() { if (file.exists() && allowDelete){ boolean deletedOriginalFile = file.delete(); if (!deletedOriginalFile) { throw new IllegalStateException("Unable to delete file: " + file.getAbsolutePath()); } } } @Override public String toString() { return file.getAbsolutePath(); } }
1,030
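A small sketch of DeletableFile above used with try-with-resources. The class is package-private, so the sketch assumes compilation into the same package; the /tmp/example.csv path is hypothetical and would need to exist for the read to succeed.

package com.amazonaws.services.neptune.propertygraph.io;

import java.io.BufferedReader;
import java.io.File;

public class DeletableFileExample {

    public static void main(String[] args) throws Exception {
        File rewrittenSource = new File("/tmp/example.csv"); // hypothetical path

        // The file is deleted when the try block exits unless doNotDelete() is called,
        // mirroring how RewriteAndMergeCsv keeps rewritten inputs on failure or when the
        // Keep_Rewritten_Files feature toggle is present.
        try (DeletableFile file = new DeletableFile(rewrittenSource)) {
            try (BufferedReader reader = new BufferedReader(file.reader())) {
                System.out.println(reader.readLine());
            }
            // file.doNotDelete(); // uncomment to keep the file after the block exits
        }
    }
}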
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/NeptuneStreamsJsonPropertyGraphPrinter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.propertygraph.schema.DataType; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import com.fasterxml.jackson.core.JsonGenerator; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; public class NeptuneStreamsJsonPropertyGraphPrinter implements PropertyGraphPrinter { private static final AtomicLong COMMIT_NUM_GENERATOR = new AtomicLong(1); private final OutputWriter writer; private final JsonGenerator generator; private long commitNum = 1; private int opNum = 1; public NeptuneStreamsJsonPropertyGraphPrinter(OutputWriter writer, JsonGenerator generator) throws IOException { this.writer = writer; this.generator = generator; } @Override public String outputId() { return writer.outputId(); } @Override public void printHeaderMandatoryColumns(String... columns) { // Do nothing } @Override public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) { // Do nothing } @Override public void printProperties(Map<?, ?> properties) throws IOException { throw new RuntimeException("Neptune Streams JSON is not supported for this command"); } @Override public void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException { printProperties(properties); } @Override public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException { for (Map.Entry<?, ?> entry : properties.entrySet()) { String key = String.valueOf(entry.getKey()); Object value = entry.getValue(); if (isList(value)) { List<?> values = (List<?>) value; for (Object o : values) { PropertySchema propertySchema = new PropertySchema(key); propertySchema.accept(o, true); printRecord(id, streamOperation, key, o, propertySchema.dataType()); } } else { PropertySchema propertySchema = new PropertySchema(key); propertySchema.accept(value, true); printRecord(id, streamOperation, key, value, propertySchema.dataType()); } } } @Override public void printEdge(String id, String label, String from, String to) throws IOException { printEdge(id, label, from, to, null, null); } @Override public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException { printRecord(id, "e", "label", label, DataType.String, from, to); } @Override public void printNode(String id, List<String> labels) throws IOException { for (String l : labels) { printRecord(id, "vl", "label", l, DataType.String); } } @Override public void printStartRow() throws IOException { commitNum = COMMIT_NUM_GENERATOR.getAndIncrement(); opNum = 1; writer.startCommit(); } @Override public void printEndRow() throws IOException { generator.flush(); writer.endCommit(); } @Override public void close() throws Exception { generator.close(); 
writer.close(); } private void printRecord(String id, String streamOperation, String key, Object value, DataType dataType) throws IOException { printRecord(id, streamOperation, key, value, dataType, null, null); } private void printRecord(String id, String streamOperation, String key, Object value, DataType dataType, String from, String to) throws IOException { writer.startOp(); generator.writeStartObject(); generator.writeObjectFieldStart("eventId"); generator.writeNumberField("commitNum", commitNum); generator.writeNumberField("opNum", opNum++); generator.writeEndObject(); generator.writeObjectFieldStart("data"); generator.writeStringField("id", id); generator.writeStringField("type", streamOperation); generator.writeStringField("key", key); generator.writeObjectFieldStart("value"); dataType.printAsStringTo(generator, "value", value); generator.writeStringField("dataType", dataType.name()); generator.writeEndObject(); if (from != null) { generator.writeStringField("from", from); } if (to != null) { generator.writeStringField("to", to); } generator.writeEndObject(); generator.writeStringField("op", "ADD"); generator.writeEndObject(); generator.flush(); writer.endOp(); } private boolean isList(Object value) { return value instanceof List<?>; } }
1,031
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/CountingHandler.java
package com.amazonaws.services.neptune.propertygraph.io; import java.io.IOException; class CountingHandler<T> implements GraphElementHandler<T> { private final GraphElementHandler<T> parent; private long counter = 0; CountingHandler(GraphElementHandler<T> parent) { this.parent = parent; } @Override public void handle(T input, boolean allowTokens) throws IOException { parent.handle(input, allowTokens); counter++; } long numberProcessed() { return counter; } @Override public void close() throws Exception { parent.close(); } }
1,032
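A minimal sketch of CountingHandler above decorating another handler. It assumes GraphElementHandler declares only handle(T input, boolean allowTokens) plus a close() method and that both types are visible from the same package; the printing handler and element values are hypothetical.

package com.amazonaws.services.neptune.propertygraph.io;

import java.io.IOException;

public class CountingHandlerExample {

    public static void main(String[] args) throws Exception {
        // A trivial handler that prints each element it receives.
        GraphElementHandler<String> printingHandler = new GraphElementHandler<String>() {
            @Override
            public void handle(String input, boolean allowTokens) throws IOException {
                System.out.println(input);
            }

            @Override
            public void close() {
                // nothing to release
            }
        };

        // CountingHandler decorates the handler and counts how many elements pass through;
        // ExportPropertyGraphTask uses that count to decide whether a range was exhausted.
        CountingHandler<String> countingHandler = new CountingHandler<>(printingHandler);
        countingHandler.handle("v1", true);
        countingHandler.handle("v2", true);
        System.out.println(countingHandler.numberProcessed()); // 2
        countingHandler.close();
    }
}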
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/NodeWriter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import java.io.IOException; import java.util.List; import java.util.Map; public class NodeWriter implements LabelWriter<PGResult> { private final PropertyGraphPrinter propertyGraphPrinter; public NodeWriter(PropertyGraphPrinter propertyGraphPrinter) { this.propertyGraphPrinter = propertyGraphPrinter; } @Override public void handle(PGResult node, boolean allowTokens) throws IOException { Map<?, Object> properties = node.getProperties(); String id = String.valueOf(node.getId()); List<String> labels = node.getLabel(); labels = Label.fixLabelsIssue(labels); propertyGraphPrinter.printStartRow(); propertyGraphPrinter.printNode(id, labels); propertyGraphPrinter.printProperties(id, "vp", properties); propertyGraphPrinter.printEndRow(); } @Override public void close() throws Exception { propertyGraphPrinter.close(); } @Override public String outputId() { return propertyGraphPrinter.outputId(); } }
1,033
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/CsvPrinterOptions.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.TokenPrefix; public class CsvPrinterOptions { public static Builder builder(){ return new Builder(); } private final String multiValueSeparator; private final boolean includeTypeDefinitions; private final boolean escapeCsvHeaders; private final boolean includeHeaders; private final boolean isSemicolonSeparator; private final boolean escapeNewline; private final TokenPrefix tokenPrefix; private CsvPrinterOptions(String multiValueSeparator, boolean includeTypeDefinitions, boolean escapeCsvHeaders, boolean includeHeaders, boolean escapeNewline, TokenPrefix tokenPrefix) { this.multiValueSeparator = multiValueSeparator; this.includeTypeDefinitions = includeTypeDefinitions; this.escapeCsvHeaders = escapeCsvHeaders; this.includeHeaders = includeHeaders; this.escapeNewline = escapeNewline; this.isSemicolonSeparator = multiValueSeparator.equalsIgnoreCase(";"); this.tokenPrefix = tokenPrefix; } public String multiValueSeparator() { return multiValueSeparator; } public boolean includeTypeDefinitions() { return includeTypeDefinitions; } public boolean escapeCsvHeaders() { return escapeCsvHeaders; } public boolean includeHeaders() { return includeHeaders; } public boolean escapeNewline() { return escapeNewline; } public boolean isSemicolonSeparator() { return isSemicolonSeparator; } public TokenPrefix tokenPrefix() { return tokenPrefix; } public Builder copy(){ return new Builder() .setMultiValueSeparator(multiValueSeparator) .setIncludeTypeDefinitions(includeTypeDefinitions) .setEscapeCsvHeaders(escapeCsvHeaders) .setIncludeHeaders(includeHeaders) .setEscapeNewline(escapeNewline) .setTokenPrefix(tokenPrefix); } public static class Builder { private String multiValueSeparator = ""; private boolean includeTypeDefinitions = false; private boolean escapeCsvHeaders = false; private boolean includeHeaders = false; private boolean escapeNewline = false; private TokenPrefix tokenPrefix = new TokenPrefix(); public Builder setMultiValueSeparator(String multiValueSeparator) { this.multiValueSeparator = multiValueSeparator; return this; } public Builder setIncludeTypeDefinitions(boolean includeTypeDefinitions) { this.includeTypeDefinitions = includeTypeDefinitions; return this; } public Builder setEscapeCsvHeaders(boolean escapeCsvHeaders) { this.escapeCsvHeaders = escapeCsvHeaders; return this; } public Builder setIncludeHeaders(boolean includeHeaders) { this.includeHeaders = includeHeaders; return this; } public Builder setEscapeNewline(boolean escapeNewline) { this.escapeNewline = escapeNewline; return this; } public Builder setTokenPrefix(TokenPrefix tokenPrefix){ this.tokenPrefix = tokenPrefix; return this; } public CsvPrinterOptions build(){ return new CsvPrinterOptions(multiValueSeparator, includeTypeDefinitions, escapeCsvHeaders, includeHeaders, escapeNewline, tokenPrefix); } } }
1,034
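A short sketch of the CsvPrinterOptions builder above, including the derived isSemicolonSeparator() flag and copy(); CsvPrinterOptionsExample is hypothetical.

import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions;

public class CsvPrinterOptionsExample {

    public static void main(String[] args) {
        CsvPrinterOptions options = CsvPrinterOptions.builder()
                .setMultiValueSeparator(";")
                .setIncludeHeaders(true)
                .setEscapeNewline(true)
                .build();

        // isSemicolonSeparator() is derived from the multi-value separator in the constructor.
        System.out.println(options.isSemicolonSeparator()); // true

        // copy() seeds a new Builder with the current values, so variants such as a
        // headerless configuration can be derived without mutating the original.
        CsvPrinterOptions noHeaders = options.copy().setIncludeHeaders(false).build();
        System.out.println(noHeaders.includeHeaders()); // false
    }
}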
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/PrinterOptions.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; public class PrinterOptions { public static final PrinterOptions NULL_OPTIONS = new PrinterOptions( CsvPrinterOptions.builder().build(), JsonPrinterOptions.builder().build()); private final CsvPrinterOptions csvPrinterOptions; private final JsonPrinterOptions jsonPrinterOptions; public PrinterOptions(CsvPrinterOptions csvPrinterOptions) { this(csvPrinterOptions, JsonPrinterOptions.builder().build()); } public PrinterOptions(JsonPrinterOptions jsonPrinterOptions) { this(CsvPrinterOptions.builder().build(), jsonPrinterOptions); } public PrinterOptions(CsvPrinterOptions csvPrinterOptions, JsonPrinterOptions jsonPrinterOptions) { this.csvPrinterOptions = csvPrinterOptions; this.jsonPrinterOptions = jsonPrinterOptions; } public CsvPrinterOptions csv() { return csvPrinterOptions; } public JsonPrinterOptions json() { return jsonPrinterOptions; } }
1,035
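A brief sketch of PrinterOptions composition above: the CSV-only constructor falls back to default JSON options, and NULL_OPTIONS supplies defaults for both sides; PrinterOptionsExample is hypothetical.

import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;

public class PrinterOptionsExample {

    public static void main(String[] args) {
        // The CSV-only constructor fills the JSON side with default JsonPrinterOptions.
        PrinterOptions printerOptions = new PrinterOptions(
                CsvPrinterOptions.builder().setMultiValueSeparator(";").build());

        System.out.println(printerOptions.csv().multiValueSeparator()); // ;
        System.out.println(printerOptions.json().strictCardinality()); // false (builder default)

        // NULL_OPTIONS bundles default CSV and JSON options for callers that need a placeholder.
        System.out.println(PrinterOptions.NULL_OPTIONS.csv().includeHeaders()); // false
    }
}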
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/QueriesWriterFactory.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import java.io.IOException; import java.util.Map; public class QueriesWriterFactory implements WriterFactory<Map<?, ?>> { @Override public PropertyGraphPrinter createPrinter(String name, LabelSchema labelSchema, PropertyGraphTargetConfig targetConfig) throws IOException { PropertyGraphPrinter propertyGraphPrinter = targetConfig.createPrinterForQueries(name, labelSchema); propertyGraphPrinter.printHeaderRemainingColumns(labelSchema.propertySchemas()); return propertyGraphPrinter; } @Override public LabelWriter<Map<?, ?>> createLabelWriter(PropertyGraphPrinter propertyGraphPrinter, Label label) { return new QueryWriter(propertyGraphPrinter); } }
1,036
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/LabelWriter.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; public interface LabelWriter<T> extends GraphElementHandler<T> { String outputId(); }
1,037
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/QueryTask.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.io.Status; import com.amazonaws.services.neptune.propertygraph.AllLabels; import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.LabelsFilter; import com.amazonaws.services.neptune.propertygraph.NamedQuery; import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient; import com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy; import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult; import com.amazonaws.services.neptune.propertygraph.io.result.QueriesEdgeResult; import com.amazonaws.services.neptune.propertygraph.io.result.QueriesNodeResult; import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.amazonaws.services.neptune.util.Activity; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.Timer; import org.apache.tinkerpop.gremlin.driver.ResultSet; import org.apache.tinkerpop.gremlin.structure.Direction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; public class QueryTask implements Callable<Map<GraphElementType, FileSpecificLabelSchemas>> { private static final Logger logger = LoggerFactory.getLogger(QueryTask.class); private final Queue<NamedQuery> queries; private final NeptuneGremlinClient.QueryClient queryClient; private final PropertyGraphTargetConfig targetConfig; private final boolean twoPassAnalysis; private final Long timeoutMillis; private final Status status; private final AtomicInteger index; private final boolean structuredOutput; private final LabelsFilter nodeLabelFilter; private final LabelsFilter edgeLabelFilter; public QueryTask(Queue<NamedQuery> queries, NeptuneGremlinClient.QueryClient queryClient, PropertyGraphTargetConfig targetConfig, boolean twoPassAnalysis, Long timeoutMillis, Status status, AtomicInteger index, boolean structuredOutput, LabelsFilter nodeLabelFilter, LabelsFilter edgeLabelFilter) { this.queries = queries; this.queryClient = queryClient; this.targetConfig = targetConfig; this.twoPassAnalysis = twoPassAnalysis; this.timeoutMillis = timeoutMillis; this.status = status; this.index = index; this.structuredOutput = structuredOutput; this.nodeLabelFilter = nodeLabelFilter; this.edgeLabelFilter = edgeLabelFilter; } @Override public Map<GraphElementType, FileSpecificLabelSchemas> call() throws 
Exception { QueriesWriterFactory writerFactory = new QueriesWriterFactory(); Map<Label, LabelWriter<Map<?, ?>>> labelWriters = new HashMap<>(); Map<GraphElementType, FileSpecificLabelSchemas> fileSpecificLabelSchemasMap = new HashMap<>(); fileSpecificLabelSchemasMap.put(GraphElementType.nodes, new FileSpecificLabelSchemas()); fileSpecificLabelSchemasMap.put(GraphElementType.edges, new FileSpecificLabelSchemas()); try { while (status.allowContinue()) { try { NamedQuery namedQuery = queries.poll(); if (!(namedQuery == null)) { final GraphElementSchemas graphElementSchemas = new GraphElementSchemas(); if (twoPassAnalysis) { Timer.timedActivity(String.format("generating schema for query [%s]", namedQuery.query()), (Activity.Runnable) () -> updateSchema(namedQuery, graphElementSchemas)); } Timer.timedActivity(String.format("executing query [%s]", namedQuery.query()), (CheckedActivity.Runnable) () -> executeQuery(namedQuery, writerFactory, labelWriters, graphElementSchemas, fileSpecificLabelSchemasMap)); } else { status.halt(); } } catch (IllegalStateException e) { logger.warn("Unexpected result value. {}. Proceeding with next query.", e.getMessage()); } } } finally { for (LabelWriter<Map<?, ?>> labelWriter : labelWriters.values()) { try { labelWriter.close(); } catch (Exception e) { logger.warn("Error closing label writer: {}.", e.getMessage()); } } } return fileSpecificLabelSchemasMap; } private void updateSchema(NamedQuery namedQuery, GraphElementSchemas graphElementSchemas) { ResultSet firstPassResults = queryClient.submit(namedQuery.query(), timeoutMillis); firstPassResults.stream(). map(r -> castToMap(r.getObject())). forEach(r -> { graphElementSchemas.update(new Label(namedQuery.name()), r, true); }); } private void executeQuery(NamedQuery namedQuery, QueriesWriterFactory writerFactory, Map<Label, LabelWriter<Map<?, ?>>> labelWriters, GraphElementSchemas graphElementSchemas, Map<GraphElementType, FileSpecificLabelSchemas> fileSpecificLabelSchemasMap) { ResultSet results = queryClient.submit(namedQuery.query(), timeoutMillis); GraphElementHandler<Map<?, ?>> handler; if(structuredOutput) { handler = new QueriesResultWrapperHandler( new CountingHandler<QueriesNodeResult>( new ExportPGTaskHandler<QueriesNodeResult>( fileSpecificLabelSchemasMap.get(GraphElementType.nodes), graphElementSchemas, targetConfig, (WriterFactory<QueriesNodeResult>) GraphElementType.nodes.writerFactory(), new LabelWriters<>(new AtomicInteger(),0), null, status, index, nodeLabelFilter) ), new CountingHandler<QueriesEdgeResult>( new ExportPGTaskHandler<QueriesEdgeResult>( fileSpecificLabelSchemasMap.get(GraphElementType.edges), graphElementSchemas, targetConfig, (WriterFactory<QueriesEdgeResult>) GraphElementType.edges.writerFactory(), new LabelWriters<>(new AtomicInteger(),0), null, status, index, edgeLabelFilter) ) ); } else { ResultsHandler resultsHandler = new ResultsHandler( new Label(namedQuery.name()), labelWriters, writerFactory, graphElementSchemas); handler = new StatusHandler(resultsHandler, status); } results.stream(). map(r -> castToMap(r.getObject())). 
forEach(r -> { try { handler.handle(r, true); } catch (IOException e) { throw new RuntimeException(e); } }); } private HashMap<?, ?> castToMap(Object o) { if (Map.class.isAssignableFrom(o.getClass())) { return (HashMap<?, ?>) o; } throw new IllegalStateException("Expected Map, found " + o.getClass().getSimpleName()); } private class ResultsHandler implements GraphElementHandler<Map<?, ?>> { private final Label label; private final Map<Label, LabelWriter<Map<?, ?>>> labelWriters; private final QueriesWriterFactory writerFactory; private final GraphElementSchemas graphElementSchemas; private ResultsHandler(Label label, Map<Label, LabelWriter<Map<?, ?>>> labelWriters, QueriesWriterFactory writerFactory, GraphElementSchemas graphElementSchemas) { this.label = label; this.labelWriters = labelWriters; this.writerFactory = writerFactory; this.graphElementSchemas = graphElementSchemas; } private void createWriter(Map<?, ?> properties, boolean allowStructuralElements) { try { if (!graphElementSchemas.hasSchemaFor(label)) { graphElementSchemas.update(label, properties, allowStructuralElements); } LabelSchema labelSchema = graphElementSchemas.getSchemaFor(label); PropertyGraphPrinter propertyGraphPrinter = writerFactory.createPrinter(Directories.fileName(label.fullyQualifiedLabel(), index), labelSchema, targetConfig); labelWriters.put(label, writerFactory.createLabelWriter(propertyGraphPrinter, label)); } catch (IOException e) { throw new RuntimeException(e); } } @Override public void handle(Map<?, ?> properties, boolean allowTokens) throws IOException { if (!labelWriters.containsKey(label)) { createWriter(properties, allowTokens); } labelWriters.get(label).handle(properties, allowTokens); } @Override public void close() throws Exception { // Do nothing } } private static class StatusHandler implements GraphElementHandler<Map<?, ?>> { private final GraphElementHandler<Map<?, ?>> parent; private final Status status; private StatusHandler(GraphElementHandler<Map<?, ?>> parent, Status status) { this.parent = parent; this.status = status; } @Override public void handle(Map<?, ?> input, boolean allowTokens) throws IOException { parent.handle(input, allowTokens); status.update(); } @Override public void close() throws Exception { parent.close(); } } private static class QueriesResultWrapperHandler implements GraphElementHandler<Map<?, ?>> { private final GraphElementHandler<QueriesNodeResult> nodeParent; private final GraphElementHandler<QueriesEdgeResult> edgeParent; private QueriesResultWrapperHandler(GraphElementHandler<QueriesNodeResult> nodeParent, GraphElementHandler<QueriesEdgeResult> edgeParent) { this.nodeParent = nodeParent; this.edgeParent = edgeParent; } @Override public void handle(Map<?, ?> input, boolean allowTokens) throws IOException { if(isEdge(input)) { edgeParent.handle(getQueriesEdgeResult(input), allowTokens); } else { nodeParent.handle(getQueriesNodeResult(input), allowTokens); } } @Override public void close() throws Exception { nodeParent.close(); } private boolean isEdge(Map<?, ?> input) { return input.containsKey(Direction.IN) && input.containsKey(Direction.OUT); } private QueriesNodeResult getQueriesNodeResult(Map<?, ?> map) { return new QueriesNodeResult(map); } private QueriesEdgeResult getQueriesEdgeResult(Map<?, ?> map) { return new QueriesEdgeResult(map); } } }
1,038
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/SerializationConfig.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import org.apache.tinkerpop.gremlin.driver.Cluster; import org.apache.tinkerpop.gremlin.driver.MessageSerializer; import org.apache.tinkerpop.gremlin.driver.ser.GraphBinaryMessageSerializerV1; import org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV3d0; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; public class SerializationConfig { private final String serializer; private final int maxContentLength; private final int batchSize; private final boolean useJanusSerializer; public SerializationConfig(String serializer, int maxContentLength, int batchSize, boolean useJanusSerializer) { this.serializer = serializer; this.maxContentLength = maxContentLength; this.batchSize = batchSize; this.useJanusSerializer = useJanusSerializer; } public Cluster.Builder apply(Cluster.Builder builder) { Cluster.Builder b = builder.resultIterationBatchSize(batchSize) .maxContentLength(maxContentLength); if (useJanusSerializer) { Map<String, Object> config = new HashMap<>(); config.put("ioRegistries", Collections.singletonList("org.janusgraph.graphdb.tinkerpop.JanusGraphIoRegistry")); MessageSerializer s = new GraphSONMessageSerializerV3d0(); s.configure(config, null); return b.serializer(s); } else { return b.serializer(serializer); } } }
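A minimal usage sketch for the SerializationConfig above (not part of the export tool itself). It assumes a placeholder endpoint "localhost:8182" and assumes the serializer string is a valid org.apache.tinkerpop.gremlin.driver.ser.Serializers enum name such as "GRAPHBINARY_V1D0"; adjust both to your environment.

package com.amazonaws.services.neptune.propertygraph.io;

import org.apache.tinkerpop.gremlin.driver.Cluster;

public class SerializationConfigExample {
    public static void main(String[] args) {
        // Hypothetical settings: GraphBinary serializer, 64 MB max frame size,
        // result batches of 64, no JanusGraph serializer registry.
        SerializationConfig serializationConfig =
                new SerializationConfig("GRAPHBINARY_V1D0", 64 * 1024 * 1024, 64, false);

        Cluster.Builder builder = Cluster.build()
                .addContactPoint("localhost") // placeholder endpoint
                .port(8182);

        // apply() sets resultIterationBatchSize, maxContentLength and the serializer
        // before the Cluster is created.
        Cluster cluster = serializationConfig.apply(builder).create();
        cluster.close();
    }
}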
1,039
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/ExportPGTaskHandler.java
package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.io.Status; import com.amazonaws.services.neptune.propertygraph.GraphClient; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.LabelsFilter; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; class ExportPGTaskHandler<T extends PGResult> implements GraphElementHandler<T> { private static final Logger logger = LoggerFactory.getLogger(ExportPGTaskHandler.class); private final FileSpecificLabelSchemas fileSpecificLabelSchemas; private final GraphElementSchemas graphElementSchemas; private final PropertyGraphTargetConfig targetConfig; private final WriterFactory<T> writerFactory; private final LabelWriters<T> labelWriters; private final GraphClient<T> graphClient; private final Status status; private final AtomicInteger index; private final LabelsFilter labelsFilter; ExportPGTaskHandler(FileSpecificLabelSchemas fileSpecificLabelSchemas, GraphElementSchemas graphElementSchemas, PropertyGraphTargetConfig targetConfig, WriterFactory<T> writerFactory, LabelWriters<T> labelWriters, GraphClient<T> graphClient, Status status, AtomicInteger index, LabelsFilter labelsFilter) { this.fileSpecificLabelSchemas = fileSpecificLabelSchemas; this.graphElementSchemas = graphElementSchemas; this.targetConfig = targetConfig; this.writerFactory = writerFactory; this.labelWriters = labelWriters; this.graphClient = graphClient; this.status = status; this.index = index; this.labelsFilter = labelsFilter; } @Override public void handle(T input, boolean allowTokens) throws IOException { status.update(); Label label = labelsFilter.getLabelFor(input); if (!labelWriters.containsKey(label)) { createWriterFor(label); } if(graphClient != null) { graphClient.updateStats(label); } labelWriters.get(label).handle(input, allowTokens); } @Override public void close() { try { labelWriters.close(); } catch (Exception e) { logger.warn("Error closing label writer: {}.", e.getMessage()); } } private void createWriterFor(Label label) { try { LabelSchema labelSchema = graphElementSchemas.getSchemaFor(label); PropertyGraphPrinter propertyGraphPrinter = writerFactory.createPrinter( Directories.fileName(label.fullyQualifiedLabel(), index), labelSchema, targetConfig); LabelWriter<T> labelWriter = writerFactory.createLabelWriter(propertyGraphPrinter, labelSchema.label()); labelWriters.put(label, labelWriter); fileSpecificLabelSchemas.add(labelWriter.outputId(), targetConfig.format(), labelSchema); } catch (Exception e) { throw new RuntimeException(e); } } }
1,040
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/EdgeWriter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.LabelsFilter; import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import java.io.IOException; import java.util.*; public class EdgeWriter implements LabelWriter<PGResult> { private final PropertyGraphPrinter propertyGraphPrinter; private final boolean hasFromAndToLabels; public EdgeWriter(PropertyGraphPrinter propertyGraphPrinter, Label label) { this.propertyGraphPrinter = propertyGraphPrinter; this.hasFromAndToLabels = label.hasFromAndToLabels(); } @Override public void handle(PGResult edge, boolean allowTokens) throws IOException { String from = edge.getFrom(); String to = edge.getTo(); Map<?, Object> properties = edge.getProperties(); String id = edge.getId(); String label = edge.getLabel().get(0); propertyGraphPrinter.printStartRow(); if (hasFromAndToLabels){ List<String> fromLabels = edge.getFromLabels(); List<String> toLabels = edge.getToLabels(); // Temp fix for concatenated label issue fromLabels = Label.fixLabelsIssue(fromLabels); toLabels = Label.fixLabelsIssue(toLabels); propertyGraphPrinter.printEdge(id, label, from, to, fromLabels, toLabels); } else { propertyGraphPrinter.printEdge(id, label, from, to); } propertyGraphPrinter.printProperties(id, "ep", properties); propertyGraphPrinter.printEndRow(); } @Override public void close() throws Exception { propertyGraphPrinter.close(); } @Override public String outputId() { return propertyGraphPrinter.outputId(); } }
1,041
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/PropertyGraphPrinter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; public interface PropertyGraphPrinter extends AutoCloseable { String outputId(); void printHeaderMandatoryColumns(String... columns); void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns); void printProperties(Map<?, ?> properties) throws IOException; void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException; void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException; void printEdge(String id, String label, String from, String to) throws IOException; void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException; void printNode(String id, List<String> labels) throws IOException; void printStartRow() throws IOException; void printEndRow() throws IOException; }
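The interface does not document a call order, but the writers in this package (for example EdgeWriter and QueryWriter above) follow the same per-row pattern: printStartRow(), then the element, then its properties, then printEndRow(). The sketch below illustrates that assumed sequence for a single node, with made-up id, label and property values.

package com.amazonaws.services.neptune.propertygraph.io;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;

// Hypothetical helper illustrating the per-row call sequence used by the writers in this package.
public final class PrinterUsageSketch {
    public static void writeSingleNode(PropertyGraphPrinter printer) throws IOException {
        printer.printStartRow();                                             // open the row (CSV line or JSON object)
        printer.printNode("node-1", Arrays.asList("person"));                // placeholder id and label
        printer.printProperties(Collections.singletonMap("name", "alice")); // placeholder property map
        printer.printEndRow();                                               // close and flush the row
    }
}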
1,042
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/RewriteCsv.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.export.FeatureToggle; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.Timer; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVRecord; import org.apache.commons.lang3.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.Reader; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; public class RewriteCsv implements RewriteCommand { private static final Logger logger = LoggerFactory.getLogger(RewriteCsv.class); private final PropertyGraphTargetConfig targetConfig; private final ConcurrencyConfig concurrencyConfig; private final FeatureToggles featureToggles; public RewriteCsv(PropertyGraphTargetConfig targetConfig, ConcurrencyConfig concurrencyConfig, FeatureToggles featureToggles) { this.targetConfig = targetConfig; this.concurrencyConfig = concurrencyConfig; this.featureToggles = featureToggles; } @Override public MasterLabelSchemas execute(MasterLabelSchemas masterLabelSchemas) throws Exception { GraphElementType graphElementType = masterLabelSchemas.graphElementType(); System.err.println(String.format("Rewriting %s files...", graphElementType.name())); return Timer.timedActivity(String.format("rewriting %s files", graphElementType.name()), (CheckedActivity.Callable<MasterLabelSchemas>) () -> rewriteFiles(masterLabelSchemas, graphElementType, targetConfig)); } private MasterLabelSchemas rewriteFiles(MasterLabelSchemas masterLabelSchemas, GraphElementType graphElementType, PropertyGraphTargetConfig targetConfig) throws Exception { Map<Label, MasterLabelSchema> updatedSchemas = new HashMap<>(); Collection<Future<MasterLabelSchema>> futures = new ArrayList<>(); ExecutorService taskExecutor = Executors.newFixedThreadPool(concurrencyConfig.concurrency()); for (MasterLabelSchema masterLabelSchema : masterLabelSchemas.schemas()) { futures.add(taskExecutor.submit(() -> rewrite(targetConfig, graphElementType, masterLabelSchema))); } taskExecutor.shutdown(); try { taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } for (Future<MasterLabelSchema> future : futures) { if (future.isCancelled()) { throw new IllegalStateException("Unable to complete rewrite because at least one task was cancelled"); } if (!future.isDone()) { throw new IllegalStateException("Unable to complete rewrite because at least one task has not 
completed"); } MasterLabelSchema masterLabelSchema = future.get(); updatedSchemas.put(masterLabelSchema.labelSchema().label(), masterLabelSchema); } return new MasterLabelSchemas(updatedSchemas, graphElementType); } private MasterLabelSchema rewrite(PropertyGraphTargetConfig targetConfig, GraphElementType graphElementType, MasterLabelSchema masterLabelSchema) throws Exception { LabelSchema originalLabelSchema = masterLabelSchema.labelSchema(); LabelSchema masterSchema = originalLabelSchema.createCopy(); masterSchema.initStats(); Collection<String> renamedFiles = new ArrayList<>(); for (FileSpecificLabelSchema fileSpecificLabelSchema : masterLabelSchema.fileSpecificLabelSchemas()) { LabelSchema labelSchema = fileSpecificLabelSchema.labelSchema(); Label label = labelSchema.label(); File sourceCsvFile = new File(fileSpecificLabelSchema.outputId()); if (!sourceCsvFile.exists()) { if (label.labels().size() > 1) { logger.warn("Skipping multi-label file {} because it has already been rewritten under another label", sourceCsvFile); continue; } } String[] additionalElementHeaders = label.hasFromAndToLabels() ? new String[]{"~fromLabels", "~toLabels"} : new String[]{}; String[] filePropertyHeaders = labelSchema.propertySchemas().stream() .map(p -> p.property().toString()) .collect(Collectors.toList()) .toArray(new String[]{}); String[] fileHeaders = ArrayUtils.addAll( graphElementType.tokenNames().toArray(new String[]{}), ArrayUtils.addAll(additionalElementHeaders, filePropertyHeaders)); try (DeletableFile sourceFile = new DeletableFile(sourceCsvFile); Reader in = sourceFile.reader(); PropertyGraphPrinter target = graphElementType.writerFactory().createPrinter( targetConfig.format().replaceExtension(sourceCsvFile.getName(), "modified"), masterSchema, targetConfig.forFileConsolidation()); ) { if (featureToggles.containsFeature(FeatureToggle.Keep_Rewritten_Files)){ sourceFile.doNotDelete(); } renamedFiles.add(target.outputId()); CSVFormat format = CSVFormat.RFC4180.withHeader(fileHeaders); Iterable<CSVRecord> records = format.parse(in); int recordCount = 0; for (CSVRecord record : records) { target.printStartRow(); if (graphElementType == GraphElementType.nodes) { target.printNode(record.get("~id"), Arrays.asList(record.get("~label").split(";"))); } else { if (label.hasFromAndToLabels()) { target.printEdge( record.get("~id"), record.get("~label"), record.get("~from"), record.get("~to"), Arrays.asList(record.get("~fromLabels").split(";")), Arrays.asList(record.get("~toLabels").split(";"))); } else { target.printEdge(record.get("~id"), record.get("~label"), record.get("~from"), record.get("~to")); } } target.printProperties(record.toMap(), false); target.printEndRow(); recordCount++; } logger.info("Original: {}, Rewritten: {}, RecordCount: {}", sourceFile, target.outputId(), recordCount); } } return new MasterLabelSchema( masterSchema, renamedFiles.stream().map(f -> new FileSpecificLabelSchema(f, targetConfig.format(), masterSchema)).collect(Collectors.toList())); } }
1,043
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/EdgesWriterFactory.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import java.io.IOException; import java.util.Map; public class EdgesWriterFactory implements WriterFactory<PGResult> { @Override public PropertyGraphPrinter createPrinter(String name, LabelSchema labelSchema, PropertyGraphTargetConfig targetConfig) throws IOException { PropertyGraphPrinter propertyGraphPrinter = targetConfig.createPrinterForEdges(name, labelSchema); if (labelSchema.label().hasFromAndToLabels()){ propertyGraphPrinter.printHeaderMandatoryColumns("id", "label", "from", "to", "fromLabels", "toLabels"); } else { propertyGraphPrinter.printHeaderMandatoryColumns("id", "label", "from", "to"); } propertyGraphPrinter.printHeaderRemainingColumns(labelSchema.propertySchemas()); return propertyGraphPrinter; } @Override public LabelWriter<PGResult> createLabelWriter(PropertyGraphPrinter propertyGraphPrinter, Label label) { return new EdgeWriter(propertyGraphPrinter, label); } }
1,044
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/QueryJob.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.io.Status; import com.amazonaws.services.neptune.io.StatusOutputFormat; import com.amazonaws.services.neptune.propertygraph.AllLabels; import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy; import com.amazonaws.services.neptune.propertygraph.LabelsFilter; import com.amazonaws.services.neptune.propertygraph.NamedQuery; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient; import com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy; import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification; import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import com.amazonaws.services.neptune.propertygraph.schema.MasterLabelSchemas; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.Timer; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.Queue; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; public class QueryJob { private final Queue<NamedQuery> queries; private final NeptuneGremlinClient.QueryClient queryClient; private final ConcurrencyConfig concurrencyConfig; private final PropertyGraphTargetConfig targetConfig; private final boolean twoPassAnalysis; private final Long timeoutMillis; private final Collection<ExportSpecification> exportSpecifications; private final FeatureToggles featureToggles; private final boolean structuredOutput; public QueryJob(Collection<NamedQuery> queries, NeptuneGremlinClient.QueryClient queryClient, ConcurrencyConfig concurrencyConfig, PropertyGraphTargetConfig targetConfig, boolean twoPassAnalysis, Long timeoutMillis, Collection<ExportSpecification> exportSpecifications, FeatureToggles featureToggles, boolean structuredOutput){ this.queries = new ConcurrentLinkedQueue<>(queries); this.queryClient = queryClient; this.concurrencyConfig = concurrencyConfig; this.targetConfig = targetConfig; this.twoPassAnalysis = twoPassAnalysis; this.timeoutMillis = timeoutMillis; this.exportSpecifications = exportSpecifications; this.featureToggles = featureToggles; this.structuredOutput = structuredOutput; } public void execute() throws Exception { Timer.timedActivity("exporting results from queries", (CheckedActivity.Runnable) this::export); } private void export() throws ExecutionException, InterruptedException { System.err.println("Writing query results to " + targetConfig.output().name() + " as " + targetConfig.format().description()); Status status = new Status(StatusOutputFormat.Description, "query results"); ExecutorService taskExecutor = Executors.newFixedThreadPool(concurrencyConfig.concurrency()); 
Collection<Future<Map<GraphElementType, FileSpecificLabelSchemas>>> futures = new ArrayList<>(); Collection<FileSpecificLabelSchemas> nodesFileSpecificLabelSchemas = new ArrayList<>(); Collection<FileSpecificLabelSchemas> edgesFileSpecificLabelSchemas = new ArrayList<>(); LabelsFilter nodeLabelFilter = new AllLabels(NodeLabelStrategy.nodeLabelsOnly); LabelsFilter edgeLabelFilter = new AllLabels(EdgeLabelStrategy.edgeLabelsOnly); for(ExportSpecification exportSpecification : exportSpecifications) { if (exportSpecification.getGraphElementType() == GraphElementType.nodes) { nodeLabelFilter = exportSpecification.getLabelsFilter(); } else { edgeLabelFilter = exportSpecification.getLabelsFilter(); } } AtomicInteger fileIndex = new AtomicInteger(); for (int index = 1; index <= concurrencyConfig.concurrency(); index++) { QueryTask queryTask = new QueryTask( queries, queryClient, targetConfig, twoPassAnalysis, timeoutMillis, status, fileIndex, structuredOutput, nodeLabelFilter, edgeLabelFilter); futures.add(taskExecutor.submit(queryTask)); } taskExecutor.shutdown(); try { taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } for (Future<Map<GraphElementType, FileSpecificLabelSchemas>> future : futures) { if (future.isCancelled()) { throw new IllegalStateException("Unable to complete job because at least one task was cancelled"); } if (!future.isDone()) { throw new IllegalStateException("Unable to complete job because at least one task has not completed"); } Map<GraphElementType, FileSpecificLabelSchemas> result = future.get(); nodesFileSpecificLabelSchemas.add(result.get(GraphElementType.nodes)); edgesFileSpecificLabelSchemas.add(result.get(GraphElementType.edges)); } RewriteCommand rewriteCommand = targetConfig.createRewriteCommand(concurrencyConfig, featureToggles); for(ExportSpecification exportSpecification : exportSpecifications) { MasterLabelSchemas masterLabelSchemas = exportSpecification.createMasterLabelSchemas( exportSpecification.getGraphElementType().equals(GraphElementType.nodes) ? nodesFileSpecificLabelSchemas : edgesFileSpecificLabelSchemas ); try { rewriteCommand.execute(masterLabelSchemas); } catch (Exception e) { throw new RuntimeException(e); } } } }
1,045
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/JsonPropertyGraphPrinter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.propertygraph.TokenPrefix; import com.amazonaws.services.neptune.propertygraph.schema.DataType; import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import com.fasterxml.jackson.core.JsonGenerator; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; public class JsonPropertyGraphPrinter implements PropertyGraphPrinter { private final OutputWriter writer; private final JsonGenerator generator; private final LabelSchema labelSchema; private final boolean allowUpdateSchema; private final PrinterOptions printerOptions; private boolean isNullable = false; private final TokenPrefix tokenPrefix; public JsonPropertyGraphPrinter(OutputWriter writer, JsonGenerator generator, LabelSchema labelSchema, PrinterOptions printerOptions) throws IOException { this(writer, generator, labelSchema, printerOptions, false); } public JsonPropertyGraphPrinter(OutputWriter writer, JsonGenerator generator, LabelSchema labelSchema, PrinterOptions printerOptions, boolean allowUpdateSchema) throws IOException { this.writer = writer; this.generator = generator; this.labelSchema = labelSchema; this.allowUpdateSchema = allowUpdateSchema; this.printerOptions = printerOptions; this.tokenPrefix = printerOptions.json().tokenPrefix(); } @Override public String outputId() { return writer.outputId(); } @Override public void printHeaderMandatoryColumns(String... 
columns) { // Do nothing } @Override public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) { // Do nothing } @Override public void printProperties(Map<?, ?> properties) throws IOException { // print known properties for (PropertySchema propertySchema : labelSchema.propertySchemas()) { Object key = propertySchema.property(); Object value = properties.get(key); if (properties.containsKey(key)) { PropertySchema.PropertyValueMetadata propertyValueMetadata = propertySchema.accept(value, allowUpdateSchema); labelSchema.recordObservation(propertySchema, value, propertyValueMetadata); printProperty(value, propertySchema); } else { if (allowUpdateSchema) { propertySchema.makeNullable(); } } } // Print unknown properties if (allowUpdateSchema) { for (Map.Entry<?, ?> property : properties.entrySet()) { Object key = property.getKey(); if (!labelSchema.containsProperty(key)) { Object value = property.getValue(); PropertySchema propertySchema = new PropertySchema(key); PropertySchema.PropertyValueMetadata propertyValueMetadata = propertySchema.accept(value, true); if (isNullable) { propertySchema.makeNullable(); } labelSchema.put(key, propertySchema); labelSchema.recordObservation(propertySchema, value, propertyValueMetadata); printProperty(value, propertySchema); } } } isNullable = true; } private void printProperty(Object value, PropertySchema propertySchema) throws IOException { DataType dataType = propertySchema.dataType(); String formattedKey = propertySchema.nameWithoutDataType(); boolean isMultiValue = propertySchema.isMultiValue(); printProperty(value, dataType, formattedKey, isMultiValue); } private void printProperty(Object value, DataType dataType, String formattedKey, boolean forceMultiValue) throws IOException { if (forceMultiValue) { List<?> values = isList(value) ? 
(List<?>) value : Collections.singletonList(value); generator.writeFieldName(formattedKey); generator.writeStartArray(); for (Object v : values) { dataType.printTo(generator, v); } generator.writeEndArray(); } else { if (isList(value)) { List<?> values = (List<?>) value; if (values.size() != 1 || printerOptions.json().strictCardinality()) { generator.writeFieldName(formattedKey); generator.writeStartArray(); for (Object v : values) { dataType.printTo(generator, v); } generator.writeEndArray(); } else { dataType.printTo(generator, formattedKey, values.get(0)); } } else { dataType.printTo(generator, formattedKey, value); } } } @Override public void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException { printProperties(properties); } @Override public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException { printProperties(properties); } @Override public void printEdge(String id, String label, String from, String to) throws IOException { printEdge(id, label, from, to, null, null); } @Override public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException { generator.writeStringField( tokenPrefix.format("id"), id); generator.writeStringField(tokenPrefix.format("label"), label); generator.writeStringField(tokenPrefix.format("from"), from); generator.writeStringField(tokenPrefix.format("to"), to); if (fromLabels != null) { printProperty(fromLabels, DataType.String, tokenPrefix.format("fromLabels"), true); } if (toLabels != null) { printProperty(toLabels, DataType.String, tokenPrefix.format("toLabels"), true); } } @Override public void printNode(String id, List<String> labels) throws IOException { generator.writeStringField(tokenPrefix.format("id"), id); printProperty(labels, DataType.String, tokenPrefix.format("label"), true); } @Override public void printStartRow() throws IOException { writer.startCommit(); generator.writeStartObject(); } @Override public void printEndRow() throws IOException { generator.writeEndObject(); generator.flush(); writer.endCommit(); } @Override public void close() throws Exception { generator.close(); writer.close(); } private boolean isList(Object value) { return value instanceof List<?>; } }
1,046
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/QueryWriter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import java.io.IOException; import java.util.Map; public class QueryWriter implements LabelWriter<Map<?, ?>> { private final PropertyGraphPrinter propertyGraphPrinter; public QueryWriter(PropertyGraphPrinter propertyGraphPrinter) { this.propertyGraphPrinter = propertyGraphPrinter; } @Override public void handle(Map<?, ?> properties, boolean allowTokens) throws IOException { propertyGraphPrinter.printStartRow(); propertyGraphPrinter.printProperties(properties); propertyGraphPrinter.printEndRow(); } @Override public void close() throws Exception { propertyGraphPrinter.close(); } @Override public String outputId() { return propertyGraphPrinter.outputId(); } }
1,047
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/NeptuneStreamsSimpleJsonPropertyGraphPrinter.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.io; import com.amazonaws.services.neptune.io.OutputWriter; import com.amazonaws.services.neptune.propertygraph.schema.DataType; import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema; import com.fasterxml.jackson.core.JsonGenerator; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; public class NeptuneStreamsSimpleJsonPropertyGraphPrinter implements PropertyGraphPrinter { // private static final AtomicLong COMMIT_NUM_GENERATOR = new AtomicLong(1); private final OutputWriter writer; private final JsonGenerator generator; // private long commitNum = 1; // private int opNum = 1; public NeptuneStreamsSimpleJsonPropertyGraphPrinter(OutputWriter writer, JsonGenerator generator) throws IOException { this.writer = writer; this.generator = generator; } @Override public String outputId() { return writer.outputId(); } @Override public void printHeaderMandatoryColumns(String... columns) { // Do nothing } @Override public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) { // Do nothing } @Override public void printProperties(Map<?, ?> properties) throws IOException { throw new RuntimeException("Neptune Streams simple JSON is not supported for this command"); } @Override public void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException { printProperties(properties); } @Override public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException { for (Map.Entry<?, ?> entry : properties.entrySet()) { String key = String.valueOf(entry.getKey()); Object value = entry.getValue(); if (isList(value)) { List<?> values = (List<?>) value; for (Object o : values) { PropertySchema propertySchema = new PropertySchema(key); propertySchema.accept(o, true); printRecord(id, streamOperation, key, o, propertySchema.dataType()); } } else { PropertySchema propertySchema = new PropertySchema(key); propertySchema.accept(value, true); printRecord(id, streamOperation, key, value, propertySchema.dataType()); } } } @Override public void printEdge(String id, String label, String from, String to) throws IOException { printEdge(id, label, from, to, null, null); } @Override public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException { printRecord(id, "e", "label", label, DataType.String, from, to); } @Override public void printNode(String id, List<String> labels) throws IOException { for (String l : labels) { printRecord(id, "vl", "label", l, DataType.String); } } @Override public void printStartRow() throws IOException { // commitNum = COMMIT_NUM_GENERATOR.getAndIncrement(); // opNum = 1; writer.startCommit(); } @Override public void printEndRow() throws IOException { generator.flush(); writer.endCommit(); } @Override public void close() throws 
Exception { generator.close(); writer.close(); } private void printRecord(String id, String streamOperation, String key, Object value, DataType dataType) throws IOException { printRecord(id, streamOperation, key, value, dataType, null, null); } private void printRecord(String id, String streamOperation, String key, Object value, DataType dataType, String from, String to) throws IOException { writer.startOp(); generator.writeStartObject(); //generator.writeNumberField("commitNum", commitNum); //generator.writeNumberField("opNum", opNum++); generator.writeStringField("id", id); if (from != null) { generator.writeStringField("from", from); } else { generator.writeStringField("from", ""); } if (to != null) { generator.writeStringField("to", to); } else { generator.writeStringField("to", ""); } generator.writeStringField("type", streamOperation); generator.writeStringField("key", key); dataType.printAsStringTo(generator, "value", value); generator.writeStringField("dataType", dataType.name()); generator.writeStringField("s", ""); generator.writeStringField("p", ""); generator.writeStringField("o", ""); generator.writeStringField("g", ""); generator.writeStringField("stmt", ""); //generator.writeStringField("op", "ADD"); generator.writeEndObject(); generator.writeRaw(writer.lineSeparator()); generator.flush(); writer.endOp(); } private boolean isList(Object value) { return value instanceof List<?>; } }
1,048
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/QueriesEdgeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.T; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; public class QueriesEdgeResult implements PGResult { private final Map<?, ?> edgeMap; private final Map<?, ?> properties; public QueriesEdgeResult(Map<?, ?> input) { edgeMap = input; properties = new HashMap<>(input); properties.remove(T.label); properties.remove(T.id); properties.remove(Direction.OUT); properties.remove(Direction.IN); } @Override public GraphElementType getGraphElementType() { return GraphElementType.edges; } public List<String> getLabel() { return Collections.singletonList(String.valueOf(edgeMap.get(T.label))); } @Override public String getId() { return String.valueOf(edgeMap.get(T.id)); } @Override public Map<String, Object> getProperties() { return (Map<String, Object>) properties; } @Override public String getFrom() { return String.valueOf(((Map<String, Object>)edgeMap.get(Direction.OUT)).get(T.id)); } @Override public String getTo() { return String.valueOf(((Map<String, Object>)edgeMap.get(Direction.IN)).get(T.id)); } @Override public List<String> getFromLabels() { return Collections.singletonList(String.valueOf(((Map<String, Object>)edgeMap.get(Direction.OUT)).get(T.label))); } @Override public List<String> getToLabels() { return Collections.singletonList(String.valueOf(((Map<String, Object>)edgeMap.get(Direction.IN)).get(T.label))); } }
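For illustration only, a hand-built result map in the shape QueriesEdgeResult expects: T.id and T.label for the edge itself, nested maps under Direction.OUT and Direction.IN for the endpoints, and any remaining entries treated as edge properties. The ids, labels and the "since" property are made-up values.

package com.amazonaws.services.neptune.propertygraph.io.result;

import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.T;

import java.util.HashMap;
import java.util.Map;

public class QueriesEdgeResultExample {
    public static void main(String[] args) {
        Map<Object, Object> out = new HashMap<>();
        out.put(T.id, "p-1");        // source vertex id (made up)
        out.put(T.label, "person");
        Map<Object, Object> in = new HashMap<>();
        in.put(T.id, "c-1");         // target vertex id (made up)
        in.put(T.label, "city");

        Map<Object, Object> edge = new HashMap<>();
        edge.put(T.id, "e-1");
        edge.put(T.label, "livesIn");
        edge.put(Direction.OUT, out);
        edge.put(Direction.IN, in);
        edge.put("since", 2020);     // ordinary edge property

        QueriesEdgeResult result = new QueriesEdgeResult(edge);
        System.out.println(result.getId());         // e-1
        System.out.println(result.getFrom());       // p-1
        System.out.println(result.getTo());         // c-1
        System.out.println(result.getProperties()); // {since=2020}
    }
}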
1,049
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/ExportPGNodeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import java.util.List; import java.util.Map; public class ExportPGNodeResult implements PGResult { private final Map<String, Object> nodeMap; public ExportPGNodeResult(Map<String, Object> input) { nodeMap = input; } @Override public GraphElementType getGraphElementType() { return GraphElementType.nodes; } @Override public List<String> getLabel() { return (List<String>) nodeMap.get("~label"); } @Override public String getId() { return String.valueOf(nodeMap.get("~id")); } @Override public Map<String, Object> getProperties() { return (Map<String, Object>) nodeMap.get("properties"); } @Override public String getFrom() { throw new IllegalStateException("Illegal attempt to getFrom() from a Node Result"); } @Override public String getTo() { throw new IllegalStateException("Illegal attempt to getTo() from a Node Result"); } @Override public List<String> getFromLabels() { throw new IllegalStateException("Illegal attempt to getFromLabels() from a Node Result"); } @Override public List<String> getToLabels() { throw new IllegalStateException("Illegal attempt to getToLabels() from a Node Result"); } }
1,050
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/PGEdgeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import java.util.ArrayList; import java.util.List; import java.util.Map; public class PGEdgeResult implements PGResult{ private final Map<String, Object> edgeMap; public PGEdgeResult(Map<String, Object> input) { edgeMap = input; } @Override public GraphElementType getGraphElementType() { return GraphElementType.edges; } @Override public List<String> getLabel() { List<String> labels = new ArrayList<>(); labels.add(String.valueOf(edgeMap.get("~label"))); return labels; } @Override public String getId() { return String.valueOf(edgeMap.get("~id")); } @Override public Map<String, Object> getProperties() { return (Map<String, Object>) edgeMap.get("properties"); } @Override public String getFrom() { return String.valueOf(edgeMap.get("~from")); } @Override public String getTo() { return String.valueOf(edgeMap.get("~to")); } @Override public List<String> getFromLabels() { return (List<String>) edgeMap.get("~fromLabels"); } @Override public List<String> getToLabels() { return (List<String>) edgeMap.get("~toLabels"); } }
1,051
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/PGResult.java
package com.amazonaws.services.neptune.propertygraph.io.result; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import java.util.List; import java.util.Map; public interface PGResult { GraphElementType getGraphElementType(); List<String> getLabel(); String getId(); Map<String, Object> getProperties(); String getFrom(); String getTo(); List<String> getFromLabels(); List<String> getToLabels(); }
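A small, hypothetical consumer of the PGResult interface. It dispatches on getGraphElementType() so that the edge-only accessors are never called on node results (the node implementations above throw IllegalStateException for those).

package com.amazonaws.services.neptune.propertygraph.io.result;

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;

// Hypothetical helper: summarise any PGResult without touching accessors
// that are invalid for its element type.
public final class PGResultSummary {
    public static String describe(PGResult result) {
        StringBuilder s = new StringBuilder()
                .append(result.getGraphElementType().name())
                .append(" ").append(result.getId())
                .append(" ").append(result.getLabel());
        if (result.getGraphElementType() == GraphElementType.edges) {
            s.append(" ").append(result.getFrom()).append("->").append(result.getTo());
        }
        return s.toString();
    }
}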
1,052
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/QueriesNodeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result; import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType; import org.apache.tinkerpop.gremlin.structure.T; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.ArrayList; public class QueriesNodeResult implements PGResult { private final Map<?, ?> nodeMap; private final Map<?, ?> properties; public QueriesNodeResult(Map<?, ?> input) { nodeMap = input; properties = new HashMap<>(input); properties.remove(T.label); properties.remove(T.id); } @Override public GraphElementType getGraphElementType() { return GraphElementType.nodes; } public List<String> getLabel() { return Collections.singletonList(String.valueOf(nodeMap.get(T.label))); } @Override public String getId() { return String.valueOf(nodeMap.get(T.id)); } @Override public Map<String, Object> getProperties() { return (Map<String, Object>) properties; } @Override public String getFrom() { throw new IllegalStateException("Illegal attempt to getFrom() from a Node Result"); } @Override public String getTo() { throw new IllegalStateException("Illegal attempt to getTo() from a Node Result"); } @Override public List<String> getFromLabels() { throw new IllegalStateException("Illegal attempt to getFromLabels() from a Node Result"); } @Override public List<String> getToLabels() { throw new IllegalStateException("Illegal attempt to getToLabels() from a Node Result"); } }
1,053
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/GraphSchema.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.io.Jsonizable; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import java.util.Collection; import java.util.HashMap; import java.util.Map; public class GraphSchema implements Jsonizable<Boolean> { public static GraphSchema fromJson(JsonNode json) { Map<GraphElementType, GraphElementSchemas> graphElementsSchemas = new HashMap<>(); for (GraphElementType graphElementType : GraphElementType.values()) { JsonNode node = json.path(graphElementType.name()); if (!node.isMissingNode() && node.isArray()) { graphElementsSchemas.put(graphElementType, GraphElementSchemas.fromJson((ArrayNode) node)); } } return new GraphSchema(graphElementsSchemas); } private final Map<GraphElementType, GraphElementSchemas> graphElementsSchemas; public GraphSchema() { this(new HashMap<>()); } public GraphSchema(Map<GraphElementType, GraphElementSchemas> graphElementsSchemas) { this.graphElementsSchemas = graphElementsSchemas; } public void update(GraphElementType graphElementType, Map<?, Object> properties, boolean allowStructuralElements) { graphElementSchemasFor(graphElementType).update(properties, allowStructuralElements); } public GraphElementSchemas copyOfGraphElementSchemasFor(GraphElementType graphElementType) { return graphElementSchemasFor(graphElementType).createCopy(); } public GraphElementSchemas graphElementSchemasFor(GraphElementType graphElementType) { if (!graphElementsSchemas.containsKey(graphElementType)) { graphElementsSchemas.put(graphElementType, new GraphElementSchemas()); } return graphElementsSchemas.get(graphElementType); } public Collection<GraphElementSchemas> graphElementSchemas() { return graphElementsSchemas.values(); } public boolean isEmpty() { return graphElementsSchemas.isEmpty(); } public boolean hasNodeSchemas() { return graphElementsSchemas.containsKey(GraphElementType.nodes); } public boolean hasEdgeSchemas() { return graphElementsSchemas.containsKey(GraphElementType.edges); } @Override public JsonNode toJson(Boolean includeFilenames) { ObjectNode json = JsonNodeFactory.instance.objectNode(); for (Map.Entry<GraphElementType, GraphElementSchemas> entry : graphElementsSchemas.entrySet()) { String key = entry.getKey().name(); ArrayNode arrayNode = entry.getValue().toJson(includeFilenames); json.set(key, arrayNode); } return json; } }
1,054
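A sketch of building a GraphSchema by hand and round-tripping it through JSON, similar to what the export does when writing its schema file. It assumes Label's toJson/fromJson round-trip correctly (Label is not shown in this section).

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.tinkerpop.gremlin.structure.T;

import java.util.LinkedHashMap;
import java.util.Map;

public class GraphSchemaExample {
    public static void main(String[] args) {
        GraphSchema schema = new GraphSchema();

        // update() takes the raw element map, including the T.label token, and lets
        // GraphElementSchemas infer a per-label property schema from the values.
        Map<Object, Object> person = new LinkedHashMap<>();
        person.put(T.label, "Person");
        person.put("age", 42);
        schema.update(GraphElementType.nodes, person, false);

        System.out.println(schema.hasNodeSchemas()); // true
        System.out.println(schema.hasEdgeSchemas()); // false

        // Round-trip through JSON; toJson(false) omits the per-file output ids.
        JsonNode json = schema.toJson(false);
        GraphSchema copy = GraphSchema.fromJson(json);
        System.out.println(copy.hasNodeSchemas());   // true
    }
}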
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/GraphElementType.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.propertygraph.EdgesClient; import com.amazonaws.services.neptune.propertygraph.ExportStats; import com.amazonaws.services.neptune.propertygraph.GraphClient; import com.amazonaws.services.neptune.propertygraph.NodesClient; import com.amazonaws.services.neptune.propertygraph.io.EdgesWriterFactory; import com.amazonaws.services.neptune.propertygraph.io.NodesWriterFactory; import com.amazonaws.services.neptune.propertygraph.io.WriterFactory; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.Arrays; import java.util.Collection; public enum GraphElementType { nodes { @Override public Collection<String> tokenNames() { return Arrays.asList("~id", "~label"); } @Override public GraphClient<? extends PGResult> graphClient(GraphTraversalSource g, boolean tokensOnly, ExportStats stats, FeatureToggles featureToggles) { return new NodesClient(g, tokensOnly, stats, featureToggles); } @Override public WriterFactory<? extends PGResult> writerFactory() { return new NodesWriterFactory(); } }, edges { @Override public Collection<String> tokenNames() { return Arrays.asList("~id", "~label", "~from", "~to"); } @Override public GraphClient<? extends PGResult> graphClient(GraphTraversalSource g, boolean tokensOnly, ExportStats stats, FeatureToggles featureToggles) { return new EdgesClient(g, tokensOnly, stats, featureToggles); } @Override public WriterFactory<? extends PGResult> writerFactory() { return new EdgesWriterFactory(); } }; public abstract Collection<String> tokenNames(); public abstract GraphClient<? extends PGResult> graphClient(GraphTraversalSource g, boolean tokensOnly, ExportStats stats, FeatureToggles featureToggles); public abstract WriterFactory<? extends PGResult> writerFactory(); }
1,055
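A small sketch showing the per-element-type tokens the GraphElementType enum above exposes; graphClient() and writerFactory() are omitted here because they need a live GraphTraversalSource and the export plumbing.

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;

public class GraphElementTypeExample {
    public static void main(String[] args) {
        for (GraphElementType type : GraphElementType.values()) {
            // nodes -> [~id, ~label]
            // edges -> [~id, ~label, ~from, ~to]
            System.out.println(type.name() + " tokens: " + type.tokenNames());
        }
    }
}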
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/PropertySchemaStats.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import java.util.EnumMap; import java.util.Map; import java.util.stream.Collectors; public class PropertySchemaStats { private final Object property; private final boolean lock; private int minCardinality; private int maxCardinality; private long observationCount; private long numberValuesCount; private final EnumMap<DataType, Integer> dataTypeCounts; public PropertySchemaStats(Object property) { this(property, -1, -1, 0, 0, new EnumMap<>(DataType.class), false); } public PropertySchemaStats(Object property, int minCardinality, int maxCardinality, long observationCount, long numberValuesCount, EnumMap<DataType, Integer> dataTypeCounts, boolean lock) { this.property = property; this.minCardinality = minCardinality; this.maxCardinality = maxCardinality; this.observationCount = observationCount; this.numberValuesCount = numberValuesCount; this.dataTypeCounts = dataTypeCounts; this.lock = lock; } public void recordObservation(PropertySchema.PropertyValueMetadata propertyValueMetadata) { observationCount++; if (!lock) { int size = propertyValueMetadata.size(); if (minCardinality < 0) { minCardinality = size; maxCardinality = size; } if (size > maxCardinality) { maxCardinality = size; } if (size < minCardinality) { minCardinality = size; } numberValuesCount += size; propertyValueMetadata.addTo(dataTypeCounts); } } public Object property() { return property; } public long observationCount() { return observationCount; } public long numberValuesCount() { return numberValuesCount; } public int minCardinality() { return minCardinality; } public int maxCardinality() { return maxCardinality; } public boolean isUniformCardinality() { return minCardinality == maxCardinality; } public EnumMap<DataType, Integer> dataTypeCounts() { return dataTypeCounts; } public PropertySchemaStats union(PropertySchemaStats other) { int newMinCardinality = Math.min(minCardinality, other.minCardinality()); int newMaxCardinality = Math.max(maxCardinality, other.maxCardinality()); long newObservationCount = observationCount + other.observationCount(); long newNumberValuesCount = numberValuesCount + other.numberValuesCount(); EnumMap<DataType, Integer> newDataTypeCounts = new EnumMap<DataType, Integer>(DataType.class); newDataTypeCounts.putAll(dataTypeCounts); for (Map.Entry<DataType, Integer> entry : other.dataTypeCounts.entrySet()) { DataType key = entry.getKey(); int i = newDataTypeCounts.containsKey(key) ? 
newDataTypeCounts.get(key) : 0; newDataTypeCounts.put(key, i + entry.getValue()); } return new PropertySchemaStats(property, newMinCardinality, newMaxCardinality, newObservationCount, newNumberValuesCount, newDataTypeCounts, false); } public PropertySchemaStats createCopy() { EnumMap<DataType, Integer> newDataTypeCounts = new EnumMap<DataType, Integer>(DataType.class); newDataTypeCounts.putAll(dataTypeCounts); return new PropertySchemaStats(property, minCardinality, maxCardinality, observationCount, numberValuesCount, newDataTypeCounts, false); } public PropertySchemaStats createLockedCopyForFreshObservations() { return new PropertySchemaStats( property, minCardinality, maxCardinality, 0, numberValuesCount, dataTypeCounts, true ); } @Override public String toString() { String s = dataTypeCounts.entrySet().stream(). map(e -> e.getKey().name() + ":" + e.getValue()). collect(Collectors.joining(",")); return property + " {" + "propertyCount=" + observationCount + ", minCardinality=" + minCardinality + ", maxCardinality=" + maxCardinality + ", recordCount=" + numberValuesCount + ", dataTypeCounts=[" + s + "]" + "}"; } }
1,056
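A sketch of how PropertySchemaStats accumulates cardinality and data-type counts. The PropertyValueMetadata instances are obtained from PropertySchema.accept(), which is how the exporter feeds them in.

import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchemaStats;

import java.util.Arrays;

public class PropertySchemaStatsExample {
    public static void main(String[] args) {
        PropertySchema schema = new PropertySchema("age");
        PropertySchemaStats stats = new PropertySchemaStats("age");

        // accept() infers the data type and reports per-value metadata, which
        // recordObservation() folds into the cardinality and data-type counters.
        stats.recordObservation(schema.accept(42, true));
        stats.recordObservation(schema.accept(Arrays.asList(1, 2, 3), true));

        System.out.println(stats.observationCount());     // 2
        System.out.println(stats.numberValuesCount());    // 4
        System.out.println(stats.minCardinality());       // 1
        System.out.println(stats.maxCardinality());       // 3
        System.out.println(stats.isUniformCardinality()); // false
    }
}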
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/MasterLabelSchema.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import java.util.Collection; import java.util.stream.Collectors; public class MasterLabelSchema { private final LabelSchema labelSchema; private final Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas; public MasterLabelSchema( LabelSchema labelSchema, Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas) { this.labelSchema = labelSchema; this.fileSpecificLabelSchemas = fileSpecificLabelSchemas; } public LabelSchema labelSchema(){ return labelSchema; } public Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas(){ return fileSpecificLabelSchemas; } public Collection<String> outputIds() { return fileSpecificLabelSchemas.stream().map(FileSpecificLabelSchema::outputId).collect(Collectors.toList()); } }
1,057
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/MasterLabelSchemas.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.Label; import java.util.Collection; import java.util.Map; public class MasterLabelSchemas { private final Map<Label, MasterLabelSchema> masterLabelSchemas; private final GraphElementType graphElementType; public MasterLabelSchemas(Map<Label, MasterLabelSchema> masterLabelSchemas, GraphElementType graphElementType) { this.masterLabelSchemas = masterLabelSchemas; this.graphElementType = graphElementType; } public Collection<MasterLabelSchema> schemas() { return masterLabelSchemas.values(); } public GraphElementType graphElementType() { return graphElementType; } public GraphElementSchemas toGraphElementSchemas() { GraphElementSchemas graphElementSchemas = new GraphElementSchemas(); for (MasterLabelSchema masterLabelSchema : masterLabelSchemas.values()) { graphElementSchemas.addLabelSchema(masterLabelSchema.labelSchema(), masterLabelSchema.outputIds()); } return graphElementSchemas; } }
1,058
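A sketch of how MasterLabelSchema and MasterLabelSchemas tie one merged label schema back to the files it came from. The export format argument is passed as null only because PropertyGraphExportFormat's constants are not shown in this section.

import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.*;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

public class MasterLabelSchemasExample {
    public static void main(String[] args) {
        Label person = new Label("Person");
        LabelSchema labelSchema = new LabelSchema(person);

        // One master schema backed by the two files that contain Person rows.
        Collection<FileSpecificLabelSchema> files = Arrays.asList(
                new FileSpecificLabelSchema("nodes-0.csv", null, labelSchema),
                new FileSpecificLabelSchema("nodes-1.csv", null, labelSchema));
        MasterLabelSchema master = new MasterLabelSchema(labelSchema, files);

        MasterLabelSchemas masters = new MasterLabelSchemas(
                Collections.singletonMap(person, master), GraphElementType.nodes);

        GraphElementSchemas schemas = masters.toGraphElementSchemas();
        System.out.println(master.outputIds());              // [nodes-0.csv, nodes-1.csv]
        System.out.println(schemas.getOutputIdsFor(person)); // [nodes-0.csv, nodes-1.csv]
    }
}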
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/DataType.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.commons.lang.StringUtils; import java.io.IOException; import java.time.Instant; import java.time.format.DateTimeFormatter; import java.util.Collection; import java.util.stream.Collectors; public enum DataType { None { @Override public String typeDescription() { return ""; } @Override public boolean isNumeric() { return false; } @Override public Object convert(Object value) { return value; } @Override public int compare(Object v1, Object v2) { return -1; } }, Boolean { @Override public String typeDescription() { return ":bool"; } @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeBoolean((java.lang.Boolean) Boolean.convert(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeBooleanField(key, (java.lang.Boolean) Boolean.convert(value)); } @Override public boolean isNumeric() { return false; } @Override public Object convert(Object value) { return java.lang.Boolean.parseBoolean(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Boolean.compare((boolean) v1, (boolean) v2); } }, Byte { @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeNumber((java.lang.Byte) Byte.convert(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeNumberField(key, (java.lang.Byte) Byte.convert(value)); } @Override public boolean isNumeric() { return true; } @Override public Object convert(Object value) { return java.lang.Byte.parseByte(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Byte.compare((Byte) v1, (Byte) v2); } }, Short { @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeNumber((java.lang.Short) Short.convert(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeNumberField(key, (java.lang.Short) Short.convert(value)); } @Override public boolean isNumeric() { return true; } @Override public Object convert(Object value) { return java.lang.Short.parseShort(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Short.compare((short) v1, (short) v2); } }, Integer { @Override public String typeDescription() { return ":int"; } @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeNumber((int) value); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeNumberField(key, (int) value); } @Override public boolean 
isNumeric() { return true; } @Override public Object convert(Object value) { return java.lang.Integer.parseInt(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Integer.compare((int) v1, (int) v2); } }, Long { @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeNumber((java.lang.Long) Long.convert(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeNumberField(key, (java.lang.Long) Long.convert(value)); } @Override public boolean isNumeric() { return true; } @Override public Object convert(Object value) { return java.lang.Long.parseLong(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Long.compare((long) v1, (long) v2); } }, Float { @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeNumber((java.lang.Float) Float.convert(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeNumberField(key, (java.lang.Float) Float.convert(value)); } @Override public boolean isNumeric() { return true; } @Override public Object convert(Object value) { return java.lang.Float.parseFloat(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Float.compare((float) v1, (float) v2); } }, Double { @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeNumber((java.lang.Double) Double.convert(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeNumberField(key, (java.lang.Double) Double.convert(value)); } @Override public boolean isNumeric() { return true; } @Override public Object convert(Object value) { return java.lang.Double.parseDouble(java.lang.String.valueOf(value)); } @Override public int compare(Object v1, Object v2) { return java.lang.Double.compare((double) v1, (double) v2); } }, String { @Override public String format(Object value) { return format(value, false); } @Override public String format(Object value, boolean escapeNewline) { java.lang.String escaped = escapeDoubleQuotes(value); if (escapeNewline){ escaped = escapeNewlineChar(escaped); } if (StringUtils.isNotEmpty(escaped)) { return java.lang.String.format("\"%s\"", escaped); } else { return ""; } } private String escapeNewlineChar(String value) { return value.replace("\n", "\\n"); } @Override public String formatList(Collection<?> values, CsvPrinterOptions options) { if (values.isEmpty()) { return ""; } return java.lang.String.format("\"%s\"", values.stream(). map(v -> DataType.escapeSeparators(v, options.multiValueSeparator())). map(DataType::escapeDoubleQuotes). map(v -> options.escapeNewline() ? escapeNewlineChar(v) : v). 
collect(Collectors.joining(options.multiValueSeparator()))); } @Override public boolean isNumeric() { return false; } @Override public Object convert(Object value) { return java.lang.String.valueOf(value); } @Override public int compare(Object v1, Object v2) { return java.lang.String.valueOf(v1).compareTo(java.lang.String.valueOf(v2)); } }, Date { @Override public String format(Object value) { return format(value, false); } @Override public String format(Object value, boolean escapeNewline) { try { java.util.Date date = (java.util.Date) value; return DateTimeFormatter.ISO_INSTANT.format(date.toInstant()); } catch (ClassCastException e) { return value.toString(); } } @Override public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeString(format(value)); } @Override public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeStringField(key, format(value)); } @Override public void printAsStringTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeStringField(key, format(value)); } @Override public boolean isNumeric() { return false; } @Override public Object convert(Object value) { if (java.util.Date.class.isAssignableFrom(value.getClass())) { return value; } Instant instant = Instant.parse(value.toString()); return new java.util.Date(instant.toEpochMilli()); } @Override public int compare(Object v1, Object v2) { return ((java.util.Date) v1).compareTo((java.util.Date) v2); } }; public static DataType dataTypeFor(Class<?> cls) { String name = cls.getSimpleName(); try { return DataType.valueOf(name); } catch (IllegalArgumentException e) { return DataType.String; } } public static DataType getBroadestType(DataType oldType, DataType newType) { if (oldType == newType) { return newType; } else if (oldType == None) { return newType; } else if (oldType == Boolean) { return String; } else if (oldType == String || newType == String) { return String; } else { if (newType.ordinal() > oldType.ordinal()) { return newType; } else { return oldType; } } } public static String escapeSeparators(Object value, String separator) { if (separator.isEmpty()) { return value.toString(); } String temp = value.toString().replace("\\" + separator, separator); return temp.replace(separator, "\\" + separator); } public static String escapeDoubleQuotes(Object value) { return value.toString().replace("\"", "\"\""); } public String typeDescription() { return java.lang.String.format(":%s", name().toLowerCase()); } public String format(Object value) { return value.toString(); } public String format(Object value, boolean escapeNewline) { return value.toString(); } public void printTo(JsonGenerator generator, Object value) throws IOException { generator.writeString(value.toString()); } public void printTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeStringField(key, value.toString()); } public void printAsStringTo(JsonGenerator generator, String key, Object value) throws IOException { generator.writeStringField(key, value.toString()); } public String formatList(Collection<?> values, CsvPrinterOptions options) { return values.stream().map(v -> format(v, options.escapeNewline())).collect(Collectors.joining(options.multiValueSeparator())); } public abstract boolean isNumeric(); public abstract Object convert(Object value); public abstract int compare(Object v1, Object v2); }
1,059
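A sketch of the DataType behaviour the exporter relies on: class-to-type inference, type widening while scanning values, and CSV-safe string formatting. Only methods defined in the enum above are used.

import com.amazonaws.services.neptune.propertygraph.schema.DataType;

public class DataTypeExample {
    public static void main(String[] args) {
        // Inference from Java classes; unknown classes fall back to String.
        System.out.println(DataType.dataTypeFor(Integer.class));       // Integer
        System.out.println(DataType.dataTypeFor(StringBuilder.class)); // String

        // Widening rules used while observing property values.
        System.out.println(DataType.getBroadestType(DataType.Integer, DataType.Double));  // Double
        System.out.println(DataType.getBroadestType(DataType.Boolean, DataType.Integer)); // String

        // CSV-style formatting: strings are quoted and embedded quotes doubled.
        System.out.println(DataType.String.format("say \"hi\"")); // "say ""hi"""
        System.out.println(DataType.Integer.convert("42"));       // 42 (as int)
    }
}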
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/GraphElementSchemas.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.tinkerpop.gremlin.structure.T; import java.util.*; import java.util.stream.Collectors; public class GraphElementSchemas { public static GraphElementSchemas fromJson(ArrayNode arrayNode) { GraphElementSchemas graphElementSchemas = new GraphElementSchemas(); for (JsonNode node : arrayNode) { Label label = Label.fromJson(node.path("label")); Collection<String> filenames = new ArrayList<>(); if (node.has("files")) { ArrayNode filenamesArray = (ArrayNode) node.path("files"); for (JsonNode jsonNode : filenamesArray) { filenames.add(jsonNode.textValue()); } } graphElementSchemas.addLabelSchema(new LabelSchema(label), filenames); if (node.has("properties")) { ArrayNode propertiesArray = (ArrayNode) node.path("properties"); for (JsonNode propertyNode : propertiesArray) { if (propertyNode.isObject()) { String key = propertyNode.path("property").textValue(); DataType dataType = propertyNode.has("dataType") ? 
Enum.valueOf(DataType.class, propertyNode.path("dataType").textValue()) : DataType.None; boolean isMultiValue = propertyNode.has("isMultiValue") && propertyNode.path("isMultiValue").booleanValue(); boolean isNullable = propertyNode.has("isNullable") && propertyNode.path("isNullable").booleanValue(); EnumSet<DataType> allTypes = EnumSet.noneOf(DataType.class); if (propertyNode.has("allTypes") ){ ArrayNode allTypesNode = (ArrayNode) propertyNode.path("allTypes"); for (JsonNode jsonNode : allTypesNode) { allTypes.add(DataType.valueOf(jsonNode.textValue())); } } graphElementSchemas.getSchemaFor(label).put( key, new PropertySchema(key, isNullable, dataType, isMultiValue, allTypes)); } else { String property = propertyNode.textValue(); graphElementSchemas.getSchemaFor(label).put( property, new PropertySchema(property, false, DataType.None, false, EnumSet.noneOf(DataType.class))); } } } } return graphElementSchemas; } private final Map<Label, LabelSchemaContainer> labelSchemas = new HashMap<>(); public void addLabelSchema(LabelSchema labelSchema) { addLabelSchema(labelSchema, Collections.emptyList()); } public void addLabelSchema(LabelSchema labelSchema, Collection<String> outputIds) { labelSchemas.put(labelSchema.label(), new LabelSchemaContainer(labelSchema, outputIds)); } public Collection<LabelSchema> labelSchemas() { return labelSchemas.values().stream().map(LabelSchemaContainer::labelSchema).collect(Collectors.toList()); } public LabelSchema getSchemaFor(Label label) { if (!labelSchemas.containsKey(label)) { addLabelSchema(new LabelSchema(label)); } return labelSchemas.get(label).labelSchema(); } public Collection<String> getOutputIdsFor(Label label) { if (!labelSchemas.containsKey(label)) { return Collections.emptyList(); } return labelSchemas.get(label).outputIds(); } public boolean hasSchemaFor(Label label) { return labelSchemas.containsKey(label); } public void update(Map<?, ?> properties, boolean allowStructuralElements) { Object value = properties.get(T.label); Label label; if (List.class.isAssignableFrom(value.getClass())){ @SuppressWarnings("unchecked") List<String> values = (List<String>) value; label = new Label(values); } else { label = new Label(String.valueOf(value)); } update(label, properties, allowStructuralElements); } public void update(Label label, Map<?, ?> properties, boolean allowStructuralElements) { LabelSchema labelSchema = getSchemaFor(label); for (PropertySchema propertySchema : labelSchema.propertySchemas()) { if (!properties.containsKey(propertySchema.property())) { propertySchema.makeNullable(); } } for (Map.Entry<?, ?> entry : properties.entrySet()) { Object property = entry.getKey(); if (allowStructuralElements || !(isToken(property))) { if (!labelSchema.containsProperty(property)) { labelSchema.put(property, new PropertySchema(property)); } labelSchema.getPropertySchema(property).accept(entry.getValue(), true); } } } public Collection<Label> labels() { return labelSchemas.keySet(); } private boolean isToken(Object key) { return key.equals(T.label) || key.equals(T.id) || key.equals(T.key) || key.equals(T.value); } public ArrayNode toJson() { return toJson(false); } public ArrayNode toJson(boolean includeFilenames) { ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(); for (Map.Entry<Label, LabelSchemaContainer> entry : labelSchemas.entrySet()) { Label label = entry.getKey(); ObjectNode labelNode = JsonNodeFactory.instance.objectNode(); labelNode.set("label", label.toJson()); LabelSchema labelSchema = entry.getValue().labelSchema(); ArrayNode 
propertiesNode = JsonNodeFactory.instance.arrayNode(); for (PropertySchema propertySchema : labelSchema.propertySchemas()) { ArrayNode allTypesNode = JsonNodeFactory.instance.arrayNode(); for (DataType dataType : propertySchema.allTypes()) { allTypesNode.add(dataType.name()); } ObjectNode propertyNode = JsonNodeFactory.instance.objectNode(); propertyNode.put("property", propertySchema.property().toString()); propertyNode.put("dataType", propertySchema.dataType().name()); propertyNode.put("isMultiValue", propertySchema.isMultiValue()); propertyNode.put("isNullable", propertySchema.isNullable()); propertyNode.set("allTypes", allTypesNode); propertiesNode.add(propertyNode); } labelNode.set("properties", propertiesNode); if (includeFilenames){ ArrayNode filesNode = JsonNodeFactory.instance.arrayNode(); for (String outputId : entry.getValue().outputIds()) { filesNode.add(outputId); } labelNode.set("files", filesNode); } arrayNode.add(labelNode); } return arrayNode; } public GraphElementSchemas createCopy() { return fromJson(toJson()); } private static class LabelSchemaContainer { private final LabelSchema labelSchema; private final Collection<String> outputIds; private LabelSchemaContainer(LabelSchema labelSchema, Collection<String> outputIds) { this.labelSchema = labelSchema; this.outputIds = outputIds; } public LabelSchema labelSchema() { return labelSchema; } public Collection<String> outputIds() { return outputIds; } } }
1,060
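A sketch of how GraphElementSchemas infers a label schema from successive element maps, marking properties nullable when a later element of the same label is missing them. Only the public methods shown above are used.

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import org.apache.tinkerpop.gremlin.structure.T;

import java.util.LinkedHashMap;
import java.util.Map;

public class GraphElementSchemasExample {
    public static void main(String[] args) {
        GraphElementSchemas schemas = new GraphElementSchemas();

        Map<Object, Object> first = new LinkedHashMap<>();
        first.put(T.label, "Person");
        first.put("age", 42);
        schemas.update(first, false);

        // Second Person element without "age": the existing age schema becomes
        // nullable, and "name" is added with an inferred String type.
        Map<Object, Object> second = new LinkedHashMap<>();
        second.put(T.label, "Person");
        second.put("name", "Alice");
        schemas.update(second, false);

        LabelSchema person = schemas.labelSchemas().iterator().next();
        System.out.println(person.getPropertySchema("age").isNullable()); // true
        System.out.println(person.getPropertySchema("age").dataType());   // Integer
        System.out.println(person.getPropertySchema("name").dataType());  // String
    }
}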
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/CreateGraphSchemaCommand.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; public interface CreateGraphSchemaCommand { GraphSchema execute() throws Exception; }
1,061
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/ExportSpecification.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.cluster.ConcurrencyConfig; import com.amazonaws.services.neptune.export.FeatureToggle; import com.amazonaws.services.neptune.export.FeatureToggles; import com.amazonaws.services.neptune.io.Status; import com.amazonaws.services.neptune.io.StatusOutputFormat; import com.amazonaws.services.neptune.propertygraph.*; import com.amazonaws.services.neptune.propertygraph.io.ExportPropertyGraphTask; import com.amazonaws.services.neptune.propertygraph.io.GraphElementHandler; import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphTargetConfig; import com.amazonaws.services.neptune.propertygraph.io.result.PGResult; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; public class ExportSpecification { private final GraphElementType graphElementType; private final LabelsFilter labelsFilter; private final GremlinFilters gremlinFilters; private final boolean tokensOnly; private final ExportStats stats; private final FeatureToggles featureToggles; public ExportSpecification(GraphElementType graphElementType, LabelsFilter labelsFilter, GremlinFilters gremlinFilters, ExportStats stats, boolean tokensOnly, FeatureToggles featureToggles) { this.graphElementType = graphElementType; this.labelsFilter = labelsFilter; this.gremlinFilters = gremlinFilters; this.tokensOnly = tokensOnly; this.stats = stats; this.featureToggles = featureToggles; } public void scan(GraphSchema graphSchema, GraphTraversalSource g) { if (tokensOnly) { return; } GraphClient<? extends PGResult> graphClient = graphElementType.graphClient(g, tokensOnly, stats, featureToggles); graphClient.queryForSchema( new CreateSchemaHandler(graphElementType, graphSchema), Range.ALL, labelsFilter, gremlinFilters); } public void sample(GraphSchema graphSchema, GraphTraversalSource g, long sampleSize) { if (tokensOnly) { return; } GraphClient<? 
extends PGResult> graphClient = graphElementType.graphClient(g, tokensOnly, stats, featureToggles); Collection<Label> labels = labelsFilter.getLabelsUsing(graphClient); for (Label label : labels) { graphClient.queryForSchema( new CreateSchemaHandler(graphElementType, graphSchema), new Range(0, sampleSize), labelsFilter.filterFor(label), gremlinFilters); } } public String description() { return labelsFilter.description(graphElementType.name()); } public RangeFactory createRangeFactory(GraphTraversalSource g, RangeConfig rangeConfig, ConcurrencyConfig concurrencyConfig) { return RangeFactory.create( graphElementType.graphClient(g, tokensOnly, stats, featureToggles), labelsFilter, gremlinFilters, rangeConfig, concurrencyConfig); } public ExportPropertyGraphTask createExportTask(GraphSchema graphSchema, GraphTraversalSource g, PropertyGraphTargetConfig targetConfig, GremlinFilters gremlinFilters, RangeFactory rangeFactory, Status status, AtomicInteger index, AtomicInteger fileDescriptorCount, int maxFileDescriptorCount) { return new ExportPropertyGraphTask( graphSchema.copyOfGraphElementSchemasFor(graphElementType), labelsFilter, graphElementType.graphClient(g, tokensOnly, stats, featureToggles), graphElementType.writerFactory(), targetConfig, rangeFactory, gremlinFilters, status, index, fileDescriptorCount, maxFileDescriptorCount ); } public MasterLabelSchemas createMasterLabelSchemas(Collection<FileSpecificLabelSchemas> fileSpecificLabelSchemasCollection) { Set<Label> labels = new HashSet<>(); fileSpecificLabelSchemasCollection.forEach(s -> labels.addAll(s.labels())); Map<Label, MasterLabelSchema> masterLabelSchemas = new HashMap<>(); for (Label label : labels) { LabelSchema masterLabelSchema = new LabelSchema(label); Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas = new ArrayList<>(); for (FileSpecificLabelSchemas fileSpecificLabelSchemasForTask : fileSpecificLabelSchemasCollection) { if (fileSpecificLabelSchemasForTask.hasSchemasForLabel(label)) { Set<LabelSchema> labelSchemaSet = new HashSet<>(); for (FileSpecificLabelSchema fileSpecificLabelSchema : fileSpecificLabelSchemasForTask.fileSpecificLabelSchemasFor(label)) { fileSpecificLabelSchemas.add(fileSpecificLabelSchema); labelSchemaSet.add(fileSpecificLabelSchema.labelSchema()); } for (LabelSchema labelSchema : labelSchemaSet) { masterLabelSchema = masterLabelSchema.union(labelSchema); } } } masterLabelSchemas.put( label, new MasterLabelSchema(masterLabelSchema, fileSpecificLabelSchemas)); } return new MasterLabelSchemas(masterLabelSchemas, graphElementType); } public Collection<ExportSpecification> splitByLabel() { if (graphElementType == GraphElementType.edges || featureToggles.containsFeature(FeatureToggle.ExportByIndividualLabels)) { return labelsFilter.split().stream() .map(l -> new ExportSpecification(graphElementType, l, gremlinFilters, stats, tokensOnly, featureToggles)) .collect(Collectors.toList()); } else { return Collections.singletonList(this); } } public GraphElementType getGraphElementType() { return graphElementType; } public LabelsFilter getLabelsFilter() { return labelsFilter; } private static class CreateSchemaHandler implements GraphElementHandler<Map<?, Object>> { private final GraphElementType graphElementType; private final GraphSchema graphSchema; private final Status status; private CreateSchemaHandler(GraphElementType graphElementType, GraphSchema graphSchema) { this.graphElementType = graphElementType; this.graphSchema = graphSchema; this.status = new Status(StatusOutputFormat.Dot); } @Override 
public void handle(Map<?, Object> properties, boolean allowTokens) { status.update(); graphSchema.update(graphElementType, properties, allowTokens); } @Override public void close() throws Exception { // Do nothing } } }
1,062
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/FileSpecificLabelSchemas.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; public class FileSpecificLabelSchemas { private final Map<Label, Collection<FileSpecificLabelSchema>> fileSpecificLabelSchemas = new HashMap<>(); public void add(String outputId, PropertyGraphExportFormat format, LabelSchema labelSchema) { if (!fileSpecificLabelSchemas.containsKey(labelSchema.label())) { fileSpecificLabelSchemas.put(labelSchema.label(), new ArrayList<>()); } Collection<FileSpecificLabelSchema> schemas = fileSpecificLabelSchemas.get(labelSchema.label()); for (FileSpecificLabelSchema schema : schemas) { if (schema.outputId().equals(outputId)){ return; } } schemas.add(new FileSpecificLabelSchema(outputId, format, labelSchema)); } public Collection<Label> labels() { return fileSpecificLabelSchemas.keySet(); } public boolean hasSchemasForLabel(Label label){ return fileSpecificLabelSchemas.containsKey(label); } public Collection<FileSpecificLabelSchema> fileSpecificLabelSchemasFor(Label label){ return fileSpecificLabelSchemas.get(label); } }
1,063
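A sketch of FileSpecificLabelSchemas' de-duplication by output id. As in the earlier sketch, the PropertyGraphExportFormat argument is passed as null because its constants are not shown in this section.

import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;

public class FileSpecificLabelSchemasExample {
    public static void main(String[] args) {
        Label personLabel = new Label("Person");
        LabelSchema person = new LabelSchema(personLabel);

        FileSpecificLabelSchemas schemas = new FileSpecificLabelSchemas();
        schemas.add("nodes-0.csv", null, person);
        schemas.add("nodes-0.csv", null, person); // same outputId for the label: ignored
        schemas.add("nodes-1.csv", null, person);

        System.out.println(schemas.labels().size());                                 // 1
        System.out.println(schemas.fileSpecificLabelSchemasFor(personLabel).size()); // 2
    }
}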
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/TokensOnly.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; public enum TokensOnly { off, nodes { @Override public boolean nodeTokensOnly() { return true; } }, edges { @Override public boolean edgeTokensOnly() { return true; } }, both { @Override public boolean nodeTokensOnly() { return true; } @Override public boolean edgeTokensOnly() { return true; } }; public boolean nodeTokensOnly() { return false; } public boolean edgeTokensOnly() { return false; } }
1,064
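A trivial sketch of the TokensOnly flags; parsing it from a CLI option value is an assumption here, since the enum itself only exposes the two boolean queries.

import com.amazonaws.services.neptune.propertygraph.schema.TokensOnly;

public class TokensOnlyExample {
    public static void main(String[] args) {
        TokensOnly tokensOnly = TokensOnly.valueOf("nodes"); // off | nodes | edges | both
        System.out.println(tokensOnly.nodeTokensOnly()); // true
        System.out.println(tokensOnly.edgeTokensOnly()); // false
    }
}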
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/LabelSchema.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.Label; import org.apache.commons.lang.StringUtils; import java.util.*; public class LabelSchema { private final Label label; private final Map<Object, PropertySchema> propertySchemas = new LinkedHashMap<>(); private final Map<Object, PropertySchemaStats> propertySchemaStats = new LinkedHashMap<>(); public LabelSchema(Label label) { this.label = label; } public void put(Object property, PropertySchema propertySchema) { put(property, propertySchema, new PropertySchemaStats(property)); } private void put(Object property, PropertySchema propertySchema, PropertySchemaStats stats) { if (!property.equals(propertySchema.property())) { throw new IllegalStateException(String.format("Property name mismatch: %s, %s", property, propertySchema.property())); } propertySchemas.put(property, propertySchema); propertySchemaStats.put(property, stats); } public boolean containsProperty(Object property) { return propertySchemas.containsKey(property); } public PropertySchema getPropertySchema(Object property) { return propertySchemas.get(property); } public void recordObservation(PropertySchema propertySchema, Object value, PropertySchema.PropertyValueMetadata propertyValueMetadata) { if (propertySchema.isNullable()) { if (StringUtils.isNotEmpty(String.valueOf(value))) { propertySchemaStats.get(propertySchema.property()).recordObservation(propertyValueMetadata); } } else { propertySchemaStats.get(propertySchema.property()).recordObservation(propertyValueMetadata); } } public PropertySchemaStats getPropertySchemaStats(Object property) { return propertySchemaStats.get(property); } public Collection<PropertySchema> propertySchemas() { return propertySchemas.values(); } public Collection<PropertySchemaStats> propertySchemaStats() { return propertySchemaStats.values(); } public int propertyCount() { return propertySchemas.size(); } public Label label() { return label; } public LabelSchema createCopy() { LabelSchema result = new LabelSchema(label.createCopy()); for (PropertySchema schema : propertySchemas.values()) { Object property = schema.property(); result.put(property, schema.createCopy(), propertySchemaStats.get(property).createCopy()); } return result; } public void initStats() { Set<Object> keys = propertySchemaStats.keySet(); for (Object key : keys) { PropertySchemaStats oldStats = this.propertySchemaStats.get(key); this.propertySchemaStats.put( key, oldStats.createLockedCopyForFreshObservations()); } } public LabelSchema union(LabelSchema other) { LabelSchema result = createCopy(); for (PropertySchema otherSchema : other.propertySchemas()) { Object property = otherSchema.property(); PropertySchemaStats otherSchemaStats = other.getPropertySchemaStats(property); if (result.containsProperty(property)) { PropertySchema oldSchema = result.getPropertySchema(property); PropertySchema newSchema = oldSchema.union(otherSchema); PropertySchemaStats oldStats = 
result.getPropertySchemaStats(property); PropertySchemaStats newStats = oldStats.union(otherSchemaStats); result.put(property, newSchema, newStats); } else { result.put(property, otherSchema.createCopy(), otherSchemaStats.createCopy()); } } return result; } public boolean isSameAs(LabelSchema other) { if (!label().equals(other.label())) { return false; } if (propertySchemas().size() != other.propertySchemas().size()) { return false; } Iterator<PropertySchema> thisIterator = propertySchemas().iterator(); Iterator<PropertySchema> otherIterator = other.propertySchemas().iterator(); while (thisIterator.hasNext()) { PropertySchema thisPropertySchema = thisIterator.next(); PropertySchema otherPropertySchema = otherIterator.next(); if (!thisPropertySchema.equals(otherPropertySchema)) { return false; } } return true; } }
1,065
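A sketch of LabelSchema.union(), which widens overlapping property types and copies properties seen on only one side; it assumes Label has a public single-string constructor, as used elsewhere in this section.

import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;

import java.util.EnumSet;

public class LabelSchemaUnionExample {
    public static void main(String[] args) {
        LabelSchema a = new LabelSchema(new Label("Person"));
        a.put("age", new PropertySchema("age", false, DataType.Integer, false,
                EnumSet.of(DataType.Integer)));

        LabelSchema b = new LabelSchema(new Label("Person"));
        b.put("age", new PropertySchema("age", false, DataType.Double, false,
                EnumSet.of(DataType.Double)));
        b.put("name", new PropertySchema("name", false, DataType.String, false,
                EnumSet.of(DataType.String)));

        LabelSchema merged = a.union(b);
        System.out.println(merged.getPropertySchema("age").dataType());  // Double (widened)
        System.out.println(merged.getPropertySchema("name").dataType()); // String (copied)
        System.out.println(merged.propertyCount());                      // 2
    }
}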
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/PropertySchema.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import java.util.*; public class PropertySchema { private final Object property; private final boolean inferDataType; private boolean isNullable; private DataType dataType; private boolean isMultiValue; private final EnumSet<DataType> allTypes; public PropertySchema(Object property) { this(property, false, DataType.None, false, EnumSet.noneOf(DataType.class)); } public PropertySchema(Object property, boolean isNullable, DataType dataType, boolean isMultiValue, EnumSet<DataType> allTypes) { this.property = property; this.inferDataType = dataType == DataType.None; this.isNullable = isNullable; this.dataType = dataType; this.isMultiValue = isMultiValue; this.allTypes = allTypes; } public Object property() { return property; } public PropertyValueMetadata accept(Object value, boolean updateDataType) { /* What should we do of the user specifies a datatype in a filter, but the actual values cannot be cast to that type? At present, neptune-export will respect the user-specified type in the output schema (config.json), and in CSV headers (if appropriate for export format). But perhaps the tool should seek to guarantee that the output schema allows for all values in the exported dataset? */ PropertyValueMetadata propertyValueMetadata = new PropertyValueMetadata(); int size = 1; if (isList(value)) { List<?> values = (List<?>) value; size = values.size(); if (size != 1) { isMultiValue = true; } if (inferDataType || updateDataType) { for (Object v : values) { DataType newType = DataType.dataTypeFor(v.getClass()); allTypes.add(newType); propertyValueMetadata.updateFor(newType); dataType = DataType.getBroadestType(dataType, newType); } } } else { if (inferDataType || updateDataType) { DataType newType = DataType.dataTypeFor(value.getClass()); allTypes.add(newType); propertyValueMetadata.updateFor(newType); dataType = DataType.getBroadestType(dataType, newType); } } return propertyValueMetadata; } public void makeNullable() { isNullable = true; } private boolean isList(Object value) { return value instanceof List<?>; } public DataType dataType() { return dataType; } public boolean isMultiValue() { return isMultiValue; } public boolean isNullable() { return isNullable; } public Collection<DataType> allTypes() { return allTypes; } public String nameWithDataType(boolean escapeCharacters) { return isMultiValue ? 
String.format("%s%s[]", propertyName(property, escapeCharacters), dataType.typeDescription()) : String.format("%s%s", propertyName(property, escapeCharacters), dataType.typeDescription()); } public String nameWithoutDataType(boolean escapeCharacters) { return propertyName(property, escapeCharacters); } public String nameWithDataType() { return nameWithDataType(false); } public String nameWithoutDataType() { return nameWithoutDataType(false); } private String propertyName(Object key, boolean escapeCharacters) { if (key.equals(org.apache.tinkerpop.gremlin.structure.T.label)) { return "~label"; } if (key.equals(org.apache.tinkerpop.gremlin.structure.T.id)) { return "~id"; } if (key.equals(org.apache.tinkerpop.gremlin.structure.T.key)) { return "~key"; } if (key.equals(org.apache.tinkerpop.gremlin.structure.T.value)) { return "~value"; } if (escapeCharacters) { return String.valueOf(key).replace(":", "\\:"); } else { return String.valueOf(key); } } @Override public String toString() { return "PropertySchema{" + "property=" + property + ", isNullable=" + isNullable + ", dataType=" + dataType + ", isMultiValue=" + isMultiValue + ", allTypes=" + allTypes + '}'; } public PropertySchema createCopy() { return new PropertySchema(property.toString(), isNullable, dataType, isMultiValue, allTypes); } public PropertySchema union(PropertySchema other) { if (other.isMultiValue() == isMultiValue && other.dataType() == dataType && other.isNullable() == isNullable) { return this; } boolean newIsNullable = other.isNullable() || isNullable; boolean newIsMultiValue = other.isMultiValue() || isMultiValue; DataType newDataType = DataType.getBroadestType(dataType, other.dataType()); EnumSet<DataType> unionAllTypes = allTypes.clone(); unionAllTypes.addAll(other.allTypes); return new PropertySchema( property.toString(), newIsNullable, newDataType, newIsMultiValue, unionAllTypes ); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PropertySchema schema = (PropertySchema) o; return isNullable == schema.isNullable && isMultiValue == schema.isMultiValue && property.equals(schema.property) && dataType == schema.dataType; } @Override public int hashCode() { return Objects.hash(property, isNullable, dataType, isMultiValue); } public static class PropertyValueMetadata { private final EnumMap<DataType, Integer> dataTypeCounts = new EnumMap<DataType, Integer>(DataType.class); public int size(){ int i = 0; for (Integer value : dataTypeCounts.values()) { i += value; } return i; } void updateFor(DataType dataType){ int i = dataTypeCounts.containsKey(dataType) ? dataTypeCounts.get(dataType): 0; dataTypeCounts.put(dataType, i + 1); } public void addTo(EnumMap<DataType, Integer> m){ for (Map.Entry<DataType, Integer> entry : dataTypeCounts.entrySet()) { DataType key = entry.getKey(); int i = m.containsKey(key) ? m.get(key) : 0; m.put(key, i + entry.getValue()); } } } }
1,066
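A sketch of PropertySchema.accept() inferring and widening a property's type as values are observed, including the multi-value case; only the public API shown above is used.

import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;

import java.util.Arrays;

public class PropertySchemaExample {
    public static void main(String[] args) {
        PropertySchema schema = new PropertySchema("score");

        schema.accept(1, true);                   // inferred as Integer
        schema.accept(2.5d, true);                // widened to Double
        schema.accept(Arrays.asList(3, 4), true); // list of size > 1 flips isMultiValue

        System.out.println(schema.dataType());         // Double
        System.out.println(schema.isMultiValue());     // true
        System.out.println(schema.allTypes());         // [Integer, Double]
        System.out.println(schema.nameWithDataType()); // score:double[]
    }
}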
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/FileSpecificLabelSchema.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat; public class FileSpecificLabelSchema { private final String outputId; private final PropertyGraphExportFormat format; private final LabelSchema labelSchema; public FileSpecificLabelSchema(String outputId, PropertyGraphExportFormat format, LabelSchema labelSchema) { this.outputId = outputId; this.format = format; this.labelSchema = labelSchema; } public String outputId() { return outputId; } public PropertyGraphExportFormat getFormat() { return format; } public LabelSchema labelSchema() { return labelSchema; } }
1,067
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/CreateGraphSchemaFromScan.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.util.Activity; import com.amazonaws.services.neptune.util.Timer; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.Collection; public class CreateGraphSchemaFromScan implements CreateGraphSchemaCommand { private final Collection<ExportSpecification> exportSpecifications; private final GraphTraversalSource g; public CreateGraphSchemaFromScan(Collection<ExportSpecification> exportSpecifications, GraphTraversalSource g) { this.exportSpecifications = exportSpecifications; this.g = g; } @Override public GraphSchema execute() { GraphSchema graphSchema = new GraphSchema(); for (ExportSpecification exportSpecification : exportSpecifications) { Timer.timedActivity("creating " + exportSpecification.description() + " schema from graph scan", (Activity.Runnable) () -> { System.err.println("Creating " + exportSpecification.description() + " schema"); exportSpecification.scan(graphSchema, g); }); } return graphSchema; } }
1,068
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/CreateGraphSchemaFromSample.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.util.Activity; import com.amazonaws.services.neptune.util.Timer; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.Collection; public class CreateGraphSchemaFromSample implements CreateGraphSchemaCommand { private final Collection<ExportSpecification> exportSpecifications; private final GraphTraversalSource g; private final long sampleSize; public CreateGraphSchemaFromSample(Collection<ExportSpecification> exportSpecifications, GraphTraversalSource g, long sampleSize) { this.exportSpecifications = exportSpecifications; this.sampleSize = sampleSize; this.g = g; } @Override public GraphSchema execute() { GraphSchema graphSchema = new GraphSchema(); for (ExportSpecification exportSpecification : exportSpecifications) { Timer.timedActivity("creating " + exportSpecification.description() + " schema from sampling graph", (Activity.Runnable) () -> { System.err.println("Creating " + exportSpecification.description() + " schema"); exportSpecification.sample(graphSchema, g, sampleSize); }); } return graphSchema; } }
1,069
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/airline/NameQueriesTypeConverter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.airline; import com.amazonaws.services.neptune.propertygraph.NamedQueries; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.rvesse.airline.model.ArgumentsMetadata; import com.github.rvesse.airline.model.OptionMetadata; import com.github.rvesse.airline.parser.ParseState; import com.github.rvesse.airline.types.TypeConverter; import com.github.rvesse.airline.types.TypeConverterProvider; import com.github.rvesse.airline.types.numerics.NumericTypeConverter; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; public class NameQueriesTypeConverter implements TypeConverter, TypeConverterProvider { @Override public Object convert(String s, Class<?> aClass, String value) { ObjectMapper objectMapper = new ObjectMapper(); try { JsonNode jsonNode = objectMapper.readTree(value); return NamedQueries.fromJson(jsonNode); } catch (JsonProcessingException e) { // Not JSON representation of queries, so continue } int i = value.indexOf("="); String name; String gremlinQueries; if (i < 0){ name = "query"; gremlinQueries = value; } else { name = value.substring(0, i).trim(); gremlinQueries = value.substring(i + 1); } List<String> queries = Arrays.stream(gremlinQueries.split(";")). map(String::trim). collect(Collectors.toList()); return new NamedQueries(name, queries); } @Override public void setNumericConverter(NumericTypeConverter numericTypeConverter) { // Do nothing } @Override public <T> TypeConverter getTypeConverter(OptionMetadata optionMetadata, ParseState<T> parseState) { return this; } @Override public <T> TypeConverter getTypeConverter(ArgumentsMetadata argumentsMetadata, ParseState<T> parseState) { return this; } }
1,070
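A sketch of the two plain-text forms the converter above accepts (the JSON form is handled separately via NamedQueries.fromJson). NamedQueries' accessors are not shown in this section, so the example only constructs the results.

import com.amazonaws.services.neptune.propertygraph.NamedQueries;
import com.amazonaws.services.neptune.propertygraph.airline.NameQueriesTypeConverter;

public class NamedQueriesParsingExample {
    public static void main(String[] args) {
        NameQueriesTypeConverter converter = new NameQueriesTypeConverter();

        // "name=q1;q2" form: text before '=' is the group name, the rest is split on ';'.
        NamedQueries byName = (NamedQueries) converter.convert(
                "queries", NamedQueries.class,
                "people=g.V().hasLabel('Person');g.V().hasLabel('Company')");

        // No '=' present: the whole value becomes a single group named "query".
        NamedQueries unnamed = (NamedQueries) converter.convert(
                "queries", NamedQueries.class, "g.V().limit(10)");

        System.out.println(byName);  // output depends on NamedQueries' toString()
        System.out.println(unnamed);
    }
}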
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/ProfilesConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles; import java.util.Collection; public class ProfilesConfig { private final Collection<String> profiles; public ProfilesConfig(Collection<String> profiles) { this.profiles = profiles; } public boolean containsProfile(String profile){ return profiles.contains(profile); } }
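A brief usage sketch of the profile lookup above; the profile names are placeholders.

import com.amazonaws.services.neptune.profiles.ProfilesConfig;

import java.util.Arrays;

public class ProfilesConfigExample {
    public static void main(String[] args) {
        // Wrap the profile names supplied to an export and query for a specific profile.
        ProfilesConfig profiles = new ProfilesConfig(Arrays.asList("neptune_ml", "incremental_export"));
        System.out.println(profiles.containsProfile("neptune_ml"));       // true
        System.out.println(profiles.containsProfile("unknown_profile"));  // false
    }
}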
1,071
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/incremental_export/IncrementalExportEventHandler.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.incremental_export; import com.amazonaws.services.neptune.cluster.Cluster; import com.amazonaws.services.neptune.cluster.EventId; import com.amazonaws.services.neptune.cluster.GetLastEventId; import com.amazonaws.services.neptune.cluster.NeptuneClusterMetadata; import com.amazonaws.services.neptune.export.Args; import com.amazonaws.services.neptune.export.CompletionFileWriter; import com.amazonaws.services.neptune.export.ExportToS3NeptuneExportEventHandler; import com.amazonaws.services.neptune.export.NeptuneExportServiceEventHandler; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.propertygraph.ExportStats; import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat; import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema; import com.amazonaws.services.neptune.rdf.io.RdfExportFormat; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.Timer; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import org.slf4j.LoggerFactory; import java.util.concurrent.atomic.AtomicLong; public class IncrementalExportEventHandler implements NeptuneExportServiceEventHandler, CompletionFileWriter { private static final org.slf4j.Logger logger = LoggerFactory.getLogger(IncrementalExportEventHandler.class); private final long timestamp; private final AtomicLong commitNum = new AtomicLong(0); private final AtomicLong opNum = new AtomicLong(0); private final String exportId; private final String stageId; private final String command; public IncrementalExportEventHandler(ObjectNode additionalParams) { this.timestamp = System.currentTimeMillis(); JsonNode incrementalExport = additionalParams.path("incremental_export"); this.exportId = incrementalExport.path("exportId").textValue();; this.stageId = incrementalExport.path("stageId").textValue(); this.command = incrementalExport.path("command").textValue(); logger.info("Incremental export params: exportId: {}, stageId: {}, command: {}", exportId, stageId, command); } @Override public void updateCompletionFile(ObjectNode completionFilePayload) { ArrayNode partitions = JsonNodeFactory.instance.arrayNode(); ObjectNode partition = JsonNodeFactory.instance.objectNode(); partition.put("name", "timestamp"); partition.put("value", String.valueOf(timestamp)); partitions.add(partition); ObjectNode lastEventId = JsonNodeFactory.instance.objectNode(); lastEventId.put("commitNum", commitNum.get()); lastEventId.put("opNum", opNum.get()); ObjectNode incrementalExportNode = JsonNodeFactory.instance.objectNode(); completionFilePayload.set("incrementalExport", incrementalExportNode); incrementalExportNode.put("exportId", exportId); incrementalExportNode.put("stageId", stageId); 
incrementalExportNode.set("partitions", partitions); incrementalExportNode.set("lastEventId", lastEventId); } @Override public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) { if (args.contains("--format")) { args.removeOptions("--format"); } if (args.contains("--partition-directories")) { args.removeOptions("--partition-directories"); } boolean createExportSubdirectory = true; if (command.equals("apply")){ args.addOption("--partition-directories", String.format("timestamp=%s", timestamp)); createExportSubdirectory = false; if (args.contains("export-pg")) { args.addOption("--format", PropertyGraphExportFormat.neptuneStreamsSimpleJson.name()); } else { args.addOption("--format", RdfExportFormat.neptuneStreamsSimpleJson.name()); } } else { if (args.contains("export-pg")) { args.addOption("--format", PropertyGraphExportFormat.csv.name()); } else { args.addOption("--format", RdfExportFormat.nquads.name()); } } s3UploadParams.setCreateExportSubdirectory(createExportSubdirectory).setOverwriteExisting(true); } @Override public void onError() { // Do nothing } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception { onExportComplete(directories, stats, cluster, null); } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception { Timer.timedActivity("getting LastEventId from stream", (CheckedActivity.Runnable) () -> getLastEventIdFromStream(cluster, graphSchema == null ? "sparql" : "gremlin")); } private void getLastEventIdFromStream(Cluster cluster, String streamEndpointType) { EventId eventId = new GetLastEventId(cluster.clusterMetadata(), cluster.connectionConfig(), streamEndpointType).execute(); if (eventId != null) { commitNum.set(eventId.commitNum()); opNum.set(eventId.opNum()); } } }
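For reference, here is a minimal sketch of the "incrementalExport" block that updateCompletionFile adds to the completion-file payload, built with plain Jackson; the exportId, stageId and event numbers are placeholder values.

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class IncrementalExportPayloadSketch {
    public static void main(String[] args) {
        ObjectNode completionFilePayload = JsonNodeFactory.instance.objectNode();

        // One partition entry keyed on the export timestamp.
        ArrayNode partitions = JsonNodeFactory.instance.arrayNode();
        ObjectNode partition = JsonNodeFactory.instance.objectNode();
        partition.put("name", "timestamp");
        partition.put("value", String.valueOf(System.currentTimeMillis()));
        partitions.add(partition);

        // In the real handler these come from the stream's last event id.
        ObjectNode lastEventId = JsonNodeFactory.instance.objectNode();
        lastEventId.put("commitNum", 0L);
        lastEventId.put("opNum", 0L);

        ObjectNode incrementalExport = JsonNodeFactory.instance.objectNode();
        completionFilePayload.set("incrementalExport", incrementalExport);
        incrementalExport.put("exportId", "example-export-id");   // placeholder
        incrementalExport.put("stageId", "example-stage-id");     // placeholder
        incrementalExport.set("partitions", partitions);
        incrementalExport.set("lastEventId", lastEventId);

        System.out.println(completionFilePayload.toString());
    }
}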
1,072
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/NeptuneMachineLearningExportEventHandlerV2.java
package com.amazonaws.services.neptune.profiles.neptune_ml; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.neptune.cluster.Cluster; import com.amazonaws.services.neptune.export.Args; import com.amazonaws.services.neptune.export.ExportToS3NeptuneExportEventHandler; import com.amazonaws.services.neptune.export.NeptuneExportServiceEventHandler; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.PropertyGraphTrainingDataConfigWriterV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.RdfTrainingDataConfigWriter; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2; import com.amazonaws.services.neptune.propertygraph.ExportStats; import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions; import com.amazonaws.services.neptune.propertygraph.io.JsonPrinterOptions; import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions; import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.S3ObjectInfo; import com.amazonaws.services.neptune.util.Timer; import com.amazonaws.services.neptune.util.TransferManagerWrapper; import com.amazonaws.services.s3.Headers; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.SSEAlgorithm; import com.amazonaws.services.s3.transfer.TransferManager; import com.amazonaws.services.s3.transfer.Upload; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.commons.lang.StringUtils; import org.slf4j.LoggerFactory; import java.io.*; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import static com.amazonaws.services.neptune.export.NeptuneExportService.NEPTUNE_ML_PROFILE_NAME; public class NeptuneMachineLearningExportEventHandlerV2 implements NeptuneExportServiceEventHandler { private static final org.slf4j.Logger logger = LoggerFactory.getLogger(NeptuneMachineLearningExportEventHandlerV2.class); private final String outputS3Path; private final String s3Region; private final Args args; private final NeptuneMLSourceDataModel dataModel; private final Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection; private final Collection<String> profiles; private final boolean createExportSubdirectory; private final PrinterOptions printerOptions; private final boolean includeEdgeFeatures; private final String sseKmsKeyId; private final AWSCredentialsProvider s3CredentialsProvider; public NeptuneMachineLearningExportEventHandlerV2(String outputS3Path, String s3Region, boolean createExportSubdirectory, ObjectNode additionalParams, Args args, Collection<String> profiles, String sseKmsKeyId, AWSCredentialsProvider s3CredentialsProvider) { logger.info("Adding neptune_ml event handler"); CsvPrinterOptions csvPrinterOptions = CsvPrinterOptions.builder() .setMultiValueSeparator(";") .setEscapeCsvHeaders(args.contains("--escape-csv-headers")) .build(); JsonPrinterOptions 
jsonPrinterOptions = JsonPrinterOptions.builder() .setStrictCardinality(true) .build(); this.outputS3Path = outputS3Path; this.s3Region = s3Region; this.createExportSubdirectory = createExportSubdirectory; this.args = args; this.dataModel = args.contains("export-rdf") ? NeptuneMLSourceDataModel.RDF : NeptuneMLSourceDataModel.PropertyGraph; this.trainingJobWriterConfigCollection = createTrainingJobConfigCollection(additionalParams); this.profiles = profiles; this.printerOptions = new PrinterOptions(csvPrinterOptions, jsonPrinterOptions); this.includeEdgeFeatures = shouldIncludeEdgeFeatures(additionalParams); this.sseKmsKeyId = sseKmsKeyId; this.s3CredentialsProvider = s3CredentialsProvider; } private boolean shouldIncludeEdgeFeatures(ObjectNode additionalParams) { JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME); if (neptuneMlNode.isMissingNode()){ return true; } if (neptuneMlNode.has("disableEdgeFeatures") && neptuneMlNode.path("disableEdgeFeatures").asBoolean()){ return false; } return true; } private Collection<TrainingDataWriterConfigV2> createTrainingJobConfigCollection(ObjectNode additionalParams) { JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME); if (neptuneMlNode.isMissingNode()) { logger.info("No 'neptune_ml' config node in additional params so creating default training config"); return Collections.singletonList(new TrainingDataWriterConfigV2()); } else { Collection<TrainingDataWriterConfigV2> trainingJobWriterConfig = TrainingDataWriterConfigV2.fromJson(neptuneMlNode, this.dataModel); logger.info("Training job writer config: {}", trainingJobWriterConfig); return trainingJobWriterConfig; } } @Override public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) { logger.info("ARGS: {}", args.toString()); dataModel.updateArgsBeforeExport(args, trainingJobWriterConfigCollection); if (args.contains("--export-id")) { args.removeOptions("--export-id"); } args.addOption("--export-id", new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date())); } @Override public void onError() { // Do nothing } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception { onExportComplete(directories, stats, cluster, new GraphSchema()); } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception { PropertyName propertyName = args.contains("--exclude-type-definitions") ? 
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE : PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITH_DATATYPE; try (TransferManagerWrapper transferManager = new TransferManagerWrapper(s3Region, s3CredentialsProvider)) { for (TrainingDataWriterConfigV2 trainingJobWriterConfig : trainingJobWriterConfigCollection) { createTrainingJobConfigurationFile(trainingJobWriterConfig, directories.rootDirectory(), graphSchema, propertyName, transferManager); } } } private void createTrainingJobConfigurationFile(TrainingDataWriterConfigV2 trainingDataWriterConfig, Path outputPath, GraphSchema graphSchema, PropertyName propertyName, TransferManagerWrapper transferManager) throws Exception { File outputDirectory = outputPath.toFile(); String filename = String.format("%s.json", trainingDataWriterConfig.name()); File trainingJobConfigurationFile = new File(outputPath.toFile(), filename); try (Writer writer = new PrintWriter(trainingJobConfigurationFile)) { if (dataModel == NeptuneMLSourceDataModel.RDF) { Collection<String> filenames = new ArrayList<>(); File[] directories = outputDirectory.listFiles(File::isDirectory); for (File directory : directories) { File[] files = directory.listFiles(File::isFile); for (File file : files) { filenames.add(outputDirectory.toPath().relativize(file.toPath()).toString()); } } new RdfTrainingDataConfigWriter( filenames, createJsonGenerator(writer), trainingDataWriterConfig).write(); } else { new PropertyGraphTrainingDataConfigWriterV2( graphSchema, createJsonGenerator(writer), propertyName, printerOptions, trainingDataWriterConfig).write(includeEdgeFeatures); } } if (StringUtils.isNotEmpty(outputS3Path)) { Timer.timedActivity("uploading training job configuration file to S3", (CheckedActivity.Runnable) () -> { S3ObjectInfo outputS3ObjectInfo = calculateOutputS3Path(outputDirectory); uploadTrainingJobConfigurationFileToS3( filename, transferManager.get(), trainingJobConfigurationFile, outputS3ObjectInfo); }); } } private void uploadTrainingJobConfigurationFileToS3(String filename, TransferManager transferManager, File trainingJobConfigurationFile, S3ObjectInfo outputS3ObjectInfo) throws IOException { S3ObjectInfo s3ObjectInfo = outputS3ObjectInfo.withNewKeySuffix(filename); try (InputStream inputStream = new FileInputStream(trainingJobConfigurationFile)) { PutObjectRequest putObjectRequest = new PutObjectRequest(s3ObjectInfo.bucket(), s3ObjectInfo.key(), inputStream, S3ObjectInfo.createObjectMetadata(trainingJobConfigurationFile.length(),sseKmsKeyId)) .withTagging(ExportToS3NeptuneExportEventHandler.createObjectTags(profiles)); Upload upload = transferManager.upload(putObjectRequest); upload.waitForUploadResult(); } catch (InterruptedException e) { logger.warn(e.getMessage()); Thread.currentThread().interrupt(); } } private S3ObjectInfo calculateOutputS3Path(File outputDirectory) { S3ObjectInfo outputBaseS3ObjectInfo = new S3ObjectInfo(outputS3Path); if (createExportSubdirectory) { return outputBaseS3ObjectInfo.withNewKeySuffix(outputDirectory.getName()); } else { return outputBaseS3ObjectInfo; } } private JsonGenerator createJsonGenerator(Writer writer) throws IOException { JsonGenerator generator = new JsonFactory().createGenerator(writer); generator.setPrettyPrinter(new DefaultPrettyPrinter()); return generator; } }
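The handler above decides whether to emit edge features from the "neptune_ml" node of the additional-params document. The following sketch re-implements that check outside the handler (class and method names are illustrative) to show the expected JSON shape.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class EdgeFeaturesFlagSketch {
    // Mirrors shouldIncludeEdgeFeatures: edge features are included unless
    // additionalParams.neptune_ml.disableEdgeFeatures is present and true.
    static boolean shouldIncludeEdgeFeatures(ObjectNode additionalParams) {
        JsonNode neptuneMlNode = additionalParams.path("neptune_ml");
        if (neptuneMlNode.isMissingNode()) {
            return true;
        }
        return !(neptuneMlNode.has("disableEdgeFeatures")
                && neptuneMlNode.path("disableEdgeFeatures").asBoolean());
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode params = (ObjectNode) mapper.readTree(
                "{\"neptune_ml\":{\"disableEdgeFeatures\":true}}");
        System.out.println(shouldIncludeEdgeFeatures(params));                    // false
        System.out.println(shouldIncludeEdgeFeatures(mapper.createObjectNode())); // true
    }
}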
1,073
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/NeptuneMLSourceDataModel.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml; import com.amazonaws.services.neptune.export.Args; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseProperty; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.LabelConfigV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.RdfTaskTypeV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseNodeTaskTypeV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseRdfTaskType; import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.rdf.RdfExportScope; import com.amazonaws.services.neptune.rdf.io.RdfExportFormat; import com.fasterxml.jackson.databind.JsonNode; import java.util.Collection; public enum NeptuneMLSourceDataModel { PropertyGraph { @Override void updateArgsBeforeExport(Args args, Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection) { if (!args.contains("--exclude-type-definitions")) { args.addFlag("--exclude-type-definitions"); } if (args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeLabelsOnly.name())) { args.removeOptions("--edge-label-strategy"); } if (!args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name())) { args.addOption("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name()); } if (!args.contains("--merge-files")) { args.addFlag("--merge-files"); } if (args.contains("export-pg") && args.containsAny("--config", "--filter", "-c", "--config-file", "--filter-config-file")) { args.replace("export-pg", "export-pg-from-config"); } } @Override public String nodeTypeName() { return "Label"; } @Override public String nodeAttributeNameSingular() { return "Property"; } @Override public String nodeAttributeNamePlural() { return "Properties"; } @Override public String parseTaskType(JsonNode json, ParsingContext propertyContext, Label nodeType, String property) { return new ParseNodeTaskTypeV2(json, propertyContext).parseTaskType().name(); } @Override public String parseProperty(JsonNode json, ParsingContext propertyContext, Label nodeType) { return new ParseProperty(json, propertyContext.withLabel(nodeType), this).parseSingleProperty(); } @Override public String labelFields() { return "'node' or 'edge'"; } @Override public boolean isRdfLinkPrediction(JsonNode json) { return false; } }, RDF { @Override void updateArgsBeforeExport(Args args, Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection) { args.removeOptions("--format"); args.addOption("--format", RdfExportFormat.ntriples.name()); boolean exportEdgesOnly = true; for (TrainingDataWriterConfigV2 trainingDataWriterConfigV2 : 
trainingJobWriterConfigCollection) { for (LabelConfigV2 labelConfig : trainingDataWriterConfigV2.nodeConfig().getAllClassificationSpecifications()) { String taskType = labelConfig.taskType(); if (taskType.equals(RdfTaskTypeV2.classification.name()) || taskType.equals(RdfTaskTypeV2.regression.name())){ exportEdgesOnly = false; } } } if (!args.contains("--rdf-export-scope") && exportEdgesOnly){ args.addOption("--rdf-export-scope", RdfExportScope.edges.name()); } } @Override public String nodeTypeName() { return "Class"; } @Override public String nodeAttributeNameSingular() { return "Predicate"; } @Override public String nodeAttributeNamePlural() { return "Predicates"; } @Override public String parseTaskType(JsonNode json, ParsingContext propertyContext, Label nodeType, String property) { RdfTaskTypeV2 taskType = new ParseRdfTaskType(json, propertyContext).parseTaskType(); taskType.validate(property, nodeType); return taskType.name(); } @Override public String parseProperty(JsonNode json, ParsingContext propertyContext, Label nodeType) { return new ParseProperty(json, propertyContext.withLabel(nodeType), this).parseNullableSingleProperty(); } @Override public String labelFields() { return "'node'"; } @Override public boolean isRdfLinkPrediction(JsonNode json) { return parseTaskType(json, new ParsingContext("RDF target"), null, null).equals(RdfTaskTypeV2.link_prediction.name()); } }; abstract void updateArgsBeforeExport(Args args, Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection); public abstract String nodeTypeName(); public abstract String nodeAttributeNameSingular(); public abstract String nodeAttributeNamePlural(); public abstract String parseTaskType(JsonNode json, ParsingContext propertyContext, Label nodeType, String property); public abstract String parseProperty(JsonNode json, ParsingContext propertyContext, Label nodeType); public abstract String labelFields(); public abstract boolean isRdfLinkPrediction(JsonNode json); }
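A small sketch showing the terminology each source data model exposes; it only calls the naming methods defined on the enum above.

import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;

public class DataModelTerminologyExample {
    public static void main(String[] args) {
        for (NeptuneMLSourceDataModel model : NeptuneMLSourceDataModel.values()) {
            // Property graph exports talk about Labels/Properties, RDF exports about Classes/Predicates.
            System.out.println(model + ": " + model.nodeTypeName()
                    + " / " + model.nodeAttributeNamePlural()
                    + " / label fields " + model.labelFields());
        }
    }
}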
1,074
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/NeptuneMachineLearningExportEventHandlerV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.neptune.cluster.Cluster; import com.amazonaws.services.neptune.export.Args; import com.amazonaws.services.neptune.export.ExportToS3NeptuneExportEventHandler; import com.amazonaws.services.neptune.export.NeptuneExportServiceEventHandler; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.PropertyGraphTrainingDataConfigWriterV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.TrainingDataWriterConfigV1; import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy; import com.amazonaws.services.neptune.propertygraph.ExportStats; import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions; import com.amazonaws.services.neptune.propertygraph.io.JsonPrinterOptions; import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions; import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema; import com.amazonaws.services.neptune.util.CheckedActivity; import com.amazonaws.services.neptune.util.S3ObjectInfo; import com.amazonaws.services.neptune.util.Timer; import com.amazonaws.services.neptune.util.TransferManagerWrapper; import com.amazonaws.services.s3.Headers; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.SSEAlgorithm; import com.amazonaws.services.s3.transfer.TransferManager; import com.amazonaws.services.s3.transfer.Upload; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.commons.lang.StringUtils; import org.slf4j.LoggerFactory; import java.io.*; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.Collections; import java.util.Date; import static com.amazonaws.services.neptune.export.NeptuneExportService.NEPTUNE_ML_PROFILE_NAME; public class NeptuneMachineLearningExportEventHandlerV1 implements NeptuneExportServiceEventHandler { private static final org.slf4j.Logger logger = LoggerFactory.getLogger(NeptuneMachineLearningExportEventHandlerV1.class); private final String outputS3Path; private final String s3Region; private final Args args; private final Collection<TrainingDataWriterConfigV1> trainingJobWriterConfigCollection; private final Collection<String> profiles; private final boolean createExportSubdirectory; private final PrinterOptions printerOptions; private final String sseKmsKeyId; private final AWSCredentialsProvider s3CredentialsProvider; public NeptuneMachineLearningExportEventHandlerV1(String outputS3Path, String s3Region, 
boolean createExportSubdirectory, ObjectNode additionalParams, Args args, Collection<String> profiles, String sseKmsKeyId, AWSCredentialsProvider s3CredentialsProvider) { logger.info("Adding neptune_ml event handler"); CsvPrinterOptions csvPrinterOptions = CsvPrinterOptions.builder() .setMultiValueSeparator(";") .setEscapeCsvHeaders(args.contains("--escape-csv-headers")) .build(); JsonPrinterOptions jsonPrinterOptions = JsonPrinterOptions.builder() .setStrictCardinality(true) .build(); this.outputS3Path = outputS3Path; this.s3Region = s3Region; this.createExportSubdirectory = createExportSubdirectory; this.args = args; this.trainingJobWriterConfigCollection = createTrainingJobConfigCollection(additionalParams); this.profiles = profiles; this.printerOptions = new PrinterOptions(csvPrinterOptions, jsonPrinterOptions); this.sseKmsKeyId = sseKmsKeyId; this.s3CredentialsProvider = s3CredentialsProvider; } private Collection<TrainingDataWriterConfigV1> createTrainingJobConfigCollection(ObjectNode additionalParams) { JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME); if (neptuneMlNode.isMissingNode()) { logger.info("No 'neptune_ml' config node in additional params so creating default training config"); return Collections.singletonList(new TrainingDataWriterConfigV1()); } else { Collection<TrainingDataWriterConfigV1> trainingJobWriterConfig = TrainingDataWriterConfigV1.fromJson(neptuneMlNode); logger.info("Training job writer config: {}", trainingJobWriterConfig); return trainingJobWriterConfig; } } @Override public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) { if (args.contains("export-pg")) { if (!args.contains("--exclude-type-definitions")) { args.addFlag("--exclude-type-definitions"); } if (args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeLabelsOnly.name())) { args.removeOptions("--edge-label-strategy"); } if (!args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name())) { args.addOption("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name()); } if (args.containsAny("--config", "--filter", "-c", "--config-file", "--filter-config-file")){ args.replace("export-pg", "export-pg-from-config"); } if (!args.contains("--merge-files")) { args.addFlag("--merge-files"); } } if (args.contains("--export-id")) { args.removeOptions("--export-id"); } args.addOption("--export-id", new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date())); } @Override public void onError() { // Do nothing } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception { //Do nothing } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception { PropertyName propertyName = args.contains("--exclude-type-definitions") ? 
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE : PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITH_DATATYPE; try (TransferManagerWrapper transferManager = new TransferManagerWrapper(s3Region, s3CredentialsProvider)) { for (TrainingDataWriterConfigV1 trainingJobWriterConfig : trainingJobWriterConfigCollection) { createTrainingJobConfigurationFile(trainingJobWriterConfig, directories.rootDirectory(), graphSchema, propertyName, transferManager); } } } private void createTrainingJobConfigurationFile(TrainingDataWriterConfigV1 trainingJobWriterConfig, Path outputPath, GraphSchema graphSchema, PropertyName propertyName, TransferManagerWrapper transferManager) throws Exception { File outputDirectory = outputPath.toFile(); String filename = String.format("%s.json", trainingJobWriterConfig.name()); File trainingJobConfigurationFile = new File(outputPath.toFile(), filename); try (Writer writer = new PrintWriter(trainingJobConfigurationFile)) { new PropertyGraphTrainingDataConfigWriterV1( graphSchema, createJsonGenerator(writer), propertyName, printerOptions, trainingJobWriterConfig).write(); } if (StringUtils.isNotEmpty(outputS3Path)) { Timer.timedActivity("uploading training job configuration file to S3", (CheckedActivity.Runnable) () -> { S3ObjectInfo outputS3ObjectInfo = calculateOutputS3Path(outputDirectory); uploadTrainingJobConfigurationFileToS3( filename, transferManager.get(), trainingJobConfigurationFile, outputS3ObjectInfo); }); } } private void uploadTrainingJobConfigurationFileToS3(String filename, TransferManager transferManager, File trainingJobConfigurationFile, S3ObjectInfo outputS3ObjectInfo) throws IOException { S3ObjectInfo s3ObjectInfo = outputS3ObjectInfo.withNewKeySuffix(filename); try (InputStream inputStream = new FileInputStream(trainingJobConfigurationFile)) { ObjectMetadata objectMetadata = new ObjectMetadata(); PutObjectRequest putObjectRequest = new PutObjectRequest(s3ObjectInfo.bucket(), s3ObjectInfo.key(), inputStream, S3ObjectInfo.createObjectMetadata(trainingJobConfigurationFile.length(), sseKmsKeyId)) .withTagging(ExportToS3NeptuneExportEventHandler.createObjectTags(profiles)); Upload upload = transferManager.upload(putObjectRequest); upload.waitForUploadResult(); } catch (InterruptedException e) { logger.warn(e.getMessage()); Thread.currentThread().interrupt(); } } private S3ObjectInfo calculateOutputS3Path(File outputDirectory) { S3ObjectInfo outputBaseS3ObjectInfo = new S3ObjectInfo(outputS3Path); if (createExportSubdirectory) { return outputBaseS3ObjectInfo.withNewKeySuffix(outputDirectory.getName()); } else { return outputBaseS3ObjectInfo; } } private JsonGenerator createJsonGenerator(Writer writer) throws IOException { JsonGenerator generator = new JsonFactory().createGenerator(writer); generator.setPrettyPrinter(new DefaultPrettyPrinter()); return generator; } }
1,075
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1; import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.fasterxml.jackson.core.JsonGenerator; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; public class PropertyGraphTrainingDataConfigWriterV1 { public static final PropertyName COLUMN_NAME_WITH_DATATYPE = new PropertyName() { @Override public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) { return propertySchema.nameWithDataType(printerOptions.csv().escapeCsvHeaders()); } @Override public String unescaped(PropertySchema propertySchema) { return propertySchema.nameWithDataType(); } }; public static final PropertyName COLUMN_NAME_WITHOUT_DATATYPE = new PropertyName() { @Override public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) { return propertySchema.nameWithoutDataType(printerOptions.csv().escapeCsvHeaders()); } @Override public String unescaped(PropertySchema propertySchema) { return propertySchema.nameWithoutDataType(); } }; private final GraphSchema graphSchema; private final JsonGenerator generator; private final PropertyName propertyName; private final TrainingDataWriterConfigV1 config; private final PrinterOptions printerOptions; private final Collection<String> warnings = new ArrayList<>(); public PropertyGraphTrainingDataConfigWriterV1(GraphSchema graphSchema, JsonGenerator generator, PropertyName propertyName, PrinterOptions printerOptions) { this(graphSchema, generator, propertyName, printerOptions, new TrainingDataWriterConfigV1()); } public PropertyGraphTrainingDataConfigWriterV1(GraphSchema graphSchema, JsonGenerator generator, PropertyName propertyName, PrinterOptions printerOptions, TrainingDataWriterConfigV1 config ) { this.graphSchema = graphSchema; this.generator = generator; this.propertyName = propertyName; this.printerOptions = printerOptions; this.config = config; } public void write() throws IOException { generator.writeStartObject(); generator.writeArrayFieldStart("graph"); writeNodes(); writeEdges(); generator.writeEndArray(); generator.writeArrayFieldStart("warnings"); writeWarnings(); generator.writeEndArray(); generator.writeEndObject(); generator.flush(); } private void writeWarnings() throws IOException { for (String warning : warnings) { 
generator.writeString(warning); } } private void writeNodes() throws IOException { GraphElementType graphElementType = GraphElementType.nodes; GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType); for (Label nodeLabel : graphElementSchemas.labels()) { Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(nodeLabel); LabelSchema labelSchema = graphElementSchemas.getSchemaFor(nodeLabel); for (String outputId : outputIds) { generator.writeStartObject(); writeFileName(graphElementType, outputId); writeSeparator(","); if (config.hasNodeClassificationSpecificationForNode(nodeLabel)) { writeNodeLabel(labelSchema, config.getNodeClassificationPropertyForNode(nodeLabel)); } writeNodeFeatures(nodeLabel, labelSchema.propertySchemas(), labelSchema); generator.writeEndObject(); } } } private void writeNodeLabel(LabelSchema labelSchema, LabelConfigV1 labelConfig) throws IOException { Label label = labelSchema.label(); if (labelSchema.containsProperty(labelConfig.property())) { generator.writeArrayFieldStart("labels"); PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property()); generator.writeStartObject(); generator.writeStringField("label_type", "node"); generator.writeStringField("sub_label_type", labelConfig.labelType()); generator.writeArrayFieldStart("cols"); generator.writeString("~id"); generator.writeString(propertyName.escaped(propertySchema, printerOptions)); generator.writeEndArray(); writeSplitRates(labelConfig); if (propertySchema.isMultiValue()) { writeSeparator(";"); } generator.writeStringField("node_type", label.labelsAsString()); generator.writeEndObject(); generator.writeEndArray(); } else { warnings.add( String.format("Unable to add node class label: Node of type '%s' does not contain property '%s'.", label.fullyQualifiedLabel(), labelConfig.property())); } } private void writeSplitRates(LabelConfigV1 labelConfig) throws IOException { generator.writeArrayFieldStart("split_rate"); for (Double rate : labelConfig.splitRates()) { generator.writeNumber(rate); } generator.writeEndArray(); } private void writeNodeFeatures(Label label, Collection<PropertySchema> propertySchemas, LabelSchema labelSchema) throws IOException { boolean arrayStartHasBeenWritten = false; for (PropertySchema propertySchema : propertySchemas) { String column = propertySchema.nameWithoutDataType(); if (!config.isNodeClassificationPropertyForNode(label, column)) { if (!arrayStartHasBeenWritten) { generator.writeArrayFieldStart("features"); arrayStartHasBeenWritten = true; } if (!config.hasNodeFeatureOverrideForNodeProperty(label, column)) { writeNodeFeature(label, propertySchema, labelSchema); } } } for (FeatureOverrideConfigV1 featureOverride : config.getNodeFeatureOverrides(label)) { writeNodeFeatureOverride(label, featureOverride, propertySchemas, labelSchema); } if (arrayStartHasBeenWritten) { generator.writeEndArray(); } } private void writeNodeFeature(Label label, PropertySchema propertySchema, LabelSchema labelSchema) throws IOException { if (propertySchema.dataType() == DataType.Float || propertySchema.dataType() == DataType.Double) { writeNumericalNodeFeature(label, Collections.singletonList(propertySchema), Norm.min_max, labelSchema); } if (propertySchema.dataType() == DataType.Byte || propertySchema.dataType() == DataType.Short || propertySchema.dataType() == DataType.Integer || propertySchema.dataType() == DataType.Long) { writeNumericalNodeFeature(label, Collections.singletonList(propertySchema), Norm.min_max, labelSchema); } 
if (propertySchema.dataType() == DataType.String || propertySchema.dataType() == DataType.Boolean) { writeCategoricalNodeFeature(label, Collections.singletonList(propertySchema)); } } private void writeNodeFeatureOverride(Label label, FeatureOverrideConfigV1 featureOverride, Collection<PropertySchema> propertySchemas, LabelSchema labelSchema) throws IOException { if (featureOverride.isSinglePropertyOverride()) { PropertySchema propertySchema = propertySchemas.stream() .filter(p -> p.nameWithoutDataType().equals(featureOverride.firstProperty())) .findFirst() .orElse(null); if (propertySchema == null) { warnings.add(String.format("Unable to add node feature: Node of type '%s' does not contain property '%s'.", label.fullyQualifiedLabel(), featureOverride.firstProperty())); } else { FeatureTypeV1 featureType = featureOverride.featureType(); if (FeatureTypeV1.category == featureType) { writeCategoricalNodeFeature(label, Collections.singletonList(propertySchema), featureOverride.separator()); } else if (FeatureTypeV1.numerical == featureType) { writeNumericalNodeFeature(label, Collections.singletonList(propertySchema), featureOverride.norm(), labelSchema, featureOverride.separator()); } } } else { boolean allPropertiesPresent = featureOverride.properties().stream() .allMatch(p -> propertySchemas.stream() .anyMatch(s -> s.nameWithoutDataType().equals(p))); if (!allPropertiesPresent) { warnings.add(String.format("Unable to add multi-property node feature: Node of type '%s' does not contain one or more of the following properties: %s.", label.fullyQualifiedLabel(), featureOverride.properties().stream() .map(s -> String.format("'%s'", s)) .collect(Collectors.joining(", ")))); } else { FeatureTypeV1 featureType = featureOverride.featureType(); List<PropertySchema> multiPropertySchemas = propertySchemas.stream() .filter(p -> featureOverride.properties().contains(p.nameWithoutDataType())) .collect(Collectors.toList()); if (FeatureTypeV1.category == featureType) { writeCategoricalNodeFeature(label, multiPropertySchemas); } else if (FeatureTypeV1.numerical == featureType) { writeNumericalNodeFeature(label, multiPropertySchemas, featureOverride.norm(), labelSchema); } } } } private void writeCategoricalNodeFeature(Label label, Collection<PropertySchema> propertySchemas) throws IOException { writeCategoricalNodeFeature(label, propertySchemas, new Separator()); } private void writeCategoricalNodeFeature(Label label, Collection<PropertySchema> propertySchemas, Separator separator) throws IOException { boolean isSinglePropertyFeature = propertySchemas.size() == 1; PropertySchema firstPropertySchema = propertySchemas.iterator().next(); if (isSinglePropertyFeature && config.hasWord2VecSpecification(label, firstPropertySchema.nameWithoutDataType())) { writeWord2VecFeature(label, firstPropertySchema); } else { generator.writeStartObject(); generator.writeStringField("feat_type", "node"); generator.writeStringField("sub_feat_type", "category"); generator.writeArrayFieldStart("cols"); generator.writeString("~id"); for (PropertySchema propertySchema : propertySchemas) { generator.writeString(propertyName.escaped(propertySchema, printerOptions)); } generator.writeEndArray(); if (isSinglePropertyFeature) { separator.writeTo(generator, firstPropertySchema.isMultiValue()); } generator.writeStringField("node_type", label.labelsAsString()); generator.writeEndObject(); } } private void writeWord2VecFeature(Label label, PropertySchema propertySchema) throws IOException { Word2VecConfig word2VecConfig = 
config.getWord2VecSpecification(label, propertySchema.nameWithoutDataType()); generator.writeStartObject(); generator.writeStringField("feat_type", "node"); generator.writeStringField("sub_feat_type", "word2vec"); generator.writeArrayFieldStart("cols"); generator.writeString("~id"); generator.writeString(propertyName.escaped(propertySchema, printerOptions)); generator.writeEndArray(); generator.writeArrayFieldStart("language"); for (String language : word2VecConfig.languages()) { generator.writeString(language); } generator.writeEndArray(); generator.writeStringField("node_type", label.labelsAsString()); generator.writeEndObject(); } private void writeNumericalNodeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema) throws IOException { writeNumericalNodeFeature(label, propertySchemas, norm, labelSchema, null); } private void writeNumericalNodeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema, Separator separator) throws IOException { boolean isSinglePropertyFeature = propertySchemas.size() == 1; PropertySchema firstPropertySchema = propertySchemas.iterator().next(); if (isSinglePropertyFeature && config.hasNumericalBucketSpecification(label, firstPropertySchema.nameWithoutDataType())) { writeNumericalBucketFeature(label, firstPropertySchema); } else { List<String> multiValueProperties = propertySchemas.stream() .filter(PropertySchema::isMultiValue) .map(PropertySchema::nameWithoutDataType) .collect(Collectors.toList()); if (!multiValueProperties.isEmpty()) { warnings.add(String.format("Unable to add numerical node feature: Node of type '%s' has one or more multi-value numerical properties: %s.", label.fullyQualifiedLabel(), multiValueProperties)); return; } generator.writeStartObject(); generator.writeStringField("feat_type", "node"); FeatureTypeV1.numerical.addTo(generator); generator.writeArrayFieldStart("cols"); generator.writeString("~id"); for (PropertySchema propertySchema : propertySchemas) { generator.writeString(propertyName.escaped(propertySchema, printerOptions)); } generator.writeEndArray(); norm.addTo(generator); generator.writeStringField("node_type", label.labelsAsString()); generator.writeEndObject(); } } private void writeNumericalBucketFeature(Label label, PropertySchema propertySchema) throws IOException { NumericalBucketFeatureConfigV1 featureConfig = config.getNumericalBucketSpecification(label, propertySchema.nameWithoutDataType()); if (propertySchema.isMultiValue()) { warnings.add(String.format( "Unable to add numerical bucket feature: Property '%s' of node type '%s' is a multi-value property.", propertySchema.nameWithoutDataType(), label.fullyQualifiedLabel())); } else { generator.writeStartObject(); generator.writeStringField("feat_type", "node"); generator.writeStringField("sub_feat_type", "bucket_numerical"); generator.writeArrayFieldStart("cols"); generator.writeString("~id"); generator.writeString(propertyName.escaped(propertySchema, printerOptions)); generator.writeEndArray(); generator.writeArrayFieldStart("range"); generator.writeObject(featureConfig.range().low()); generator.writeObject(featureConfig.range().high()); generator.writeEndArray(); generator.writeNumberField("bucket_cnt", featureConfig.bucketCount()); generator.writeNumberField("slide_window_size", featureConfig.slideWindowSize()); generator.writeStringField("node_type", label.labelsAsString()); generator.writeEndObject(); } } private void writeEdges() throws IOException { GraphElementType 
graphElementType = GraphElementType.edges; GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType); for (Label edgeLabel : graphElementSchemas.labels()) { Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(edgeLabel); LabelSchema labelSchema = graphElementSchemas.getSchemaFor(edgeLabel); for (String outputId : outputIds) { generator.writeStartObject(); writeFileName(graphElementType, outputId); writeSeparator(","); if (graphElementSchemas.getSchemaFor(edgeLabel).propertyCount() == 0) { generator.writeArrayFieldStart("edges"); generator.writeStartObject(); writeEdgeSpecType(); writeCols(); writeEdgeType(edgeLabel); generator.writeEndObject(); generator.writeEndArray(); } else { if (config.hasEdgeClassificationSpecificationForEdge(edgeLabel)) { writeEdgeLabel(labelSchema, config.getEdgeClassificationPropertyForEdge(edgeLabel)); } writeEdgeFeatures(edgeLabel, labelSchema.propertySchemas(), labelSchema); } generator.writeEndObject(); } } } private void writeEdgeFeatures(Label label, Collection<PropertySchema> propertySchemas, LabelSchema labelSchema) throws IOException { boolean arrayStartHasBeenWritten = false; for (PropertySchema propertySchema : propertySchemas) { if (!config.isEdgeClassificationPropertyForEdge(label, propertySchema.nameWithoutDataType())) { if (!arrayStartHasBeenWritten) { generator.writeArrayFieldStart("features"); arrayStartHasBeenWritten = true; } if (!propertySchema.isMultiValue()) { if (!config.hasEdgeFeatureOverrideForEdgeProperty(label, propertySchema.nameWithoutDataType())) { writeNumericalEdgeFeature(label, Collections.singletonList(propertySchema), Norm.min_max, labelSchema); } } } } for (FeatureOverrideConfigV1 featureOverride : config.getEdgeFeatureOverrides(label)) { writeEdgeFeatureOverride(label, featureOverride, propertySchemas, labelSchema); } if (arrayStartHasBeenWritten) { generator.writeEndArray(); } } private void writeEdgeFeatureOverride(Label label, FeatureOverrideConfigV1 featureOverride, Collection<PropertySchema> propertySchemas, LabelSchema labelSchema) throws IOException { if (featureOverride.isSinglePropertyOverride()) { PropertySchema propertySchema = propertySchemas.stream() .filter(p -> p.nameWithoutDataType().equals(featureOverride.firstProperty())) .findFirst() .orElse(null); if (propertySchema == null) { warnings.add(String.format("Unable to add edge feature: Edge of type '%s' does not contain property '%s'.", label.fullyQualifiedLabel(), featureOverride.firstProperty())); } else { FeatureTypeV1 featureType = featureOverride.featureType(); if (FeatureTypeV1.numerical == featureType) { writeNumericalEdgeFeature(label, Collections.singletonList(propertySchema), featureOverride.norm(), labelSchema, featureOverride.separator()); } } } else { boolean allPropertiesPresent = featureOverride.properties().stream() .allMatch(p -> propertySchemas.stream() .anyMatch(s -> s.nameWithoutDataType().equals(p))); if (!allPropertiesPresent) { warnings.add(String.format("Unable to add multi-property edge feature: Edge of type '%s' does not contain one or more of the following properties: %s.", label.fullyQualifiedLabel(), featureOverride.properties().stream() .map(s -> String.format("'%s'", s)) .collect(Collectors.joining(", ")))); } else { FeatureTypeV1 featureType = featureOverride.featureType(); List<PropertySchema> multiPropertySchemas = propertySchemas.stream() .filter(p -> featureOverride.properties().contains(p.nameWithoutDataType())) .collect(Collectors.toList()); if (FeatureTypeV1.numerical == 
featureType) { writeNumericalEdgeFeature(label, multiPropertySchemas, featureOverride.norm(), labelSchema); } } } } private void writeNumericalEdgeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema) throws IOException { writeNumericalEdgeFeature(label, propertySchemas, norm, labelSchema, new Separator()); } private void writeNumericalEdgeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema, Separator separator) throws IOException { boolean isSinglePropertyFeature = propertySchemas.size() == 1; PropertySchema firstPropertySchema = propertySchemas.iterator().next(); if (isSinglePropertyFeature) { PropertySchemaStats propertySchemaStats = labelSchema.getPropertySchemaStats(firstPropertySchema.property()); if (firstPropertySchema.isMultiValue() && !propertySchemaStats.isUniformCardinality()) { warnings.add(String.format("Unable to add numerical edge feature: Edge of type '%s' has a multi-value numerical property '%s' with differing numbers of values.", label.fullyQualifiedLabel(), firstPropertySchema.property())); return; } } generator.writeStartObject(); generator.writeStringField("feat_type", "edge"); FeatureTypeV1.numerical.addTo(generator); generator.writeArrayFieldStart("cols"); generator.writeString("~from"); generator.writeString("~to"); for (PropertySchema propertySchema : propertySchemas) { generator.writeString(propertyName.escaped(propertySchema, printerOptions)); } generator.writeEndArray(); norm.addTo(generator); if (isSinglePropertyFeature) { separator.writeTo(generator, firstPropertySchema.isMultiValue()); } writeEdgeType(label); generator.writeEndObject(); } private void writeEdgeLabel(LabelSchema labelSchema, LabelConfigV1 labelConfig) throws IOException { Label label = labelSchema.label(); if (labelSchema.containsProperty(labelConfig.property())) { PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property()); generator.writeArrayFieldStart("labels"); generator.writeStartObject(); generator.writeStringField("label_type", "edge"); generator.writeStringField("sub_label_type", labelConfig.labelType()); generator.writeArrayFieldStart("cols"); generator.writeString("~from"); generator.writeString("~to"); generator.writeString(propertyName.escaped(propertySchema, printerOptions)); generator.writeEndArray(); writeSplitRates(labelConfig); if (propertySchema.isMultiValue()) { writeSeparator(";"); } writeEdgeType(label); generator.writeEndObject(); generator.writeEndArray(); } else { warnings.add( String.format("Unable to add edge class label: Edge of type '%s' does not contain property '%s'.", label.labelsAsString(), labelConfig.property())); } } private void writeSeparator(String separator) throws IOException { generator.writeStringField("separator", separator); } private void writeFileName(GraphElementType graphElementType, String outputId) throws IOException { generator.writeStringField("file_name", String.format("%s/%s", graphElementType.name(), new File(outputId).getName())); } private void writeEdgeSpecType() throws IOException { generator.writeStringField("edge_spec_type", "edge"); } private void writeCols() throws IOException { generator.writeArrayFieldStart("cols"); generator.writeString("~from"); generator.writeString("~to"); generator.writeEndArray(); } private void writeEdgeType(Label label) throws IOException { generator.writeArrayFieldStart("edge_type"); generator.writeString(label.fromLabelsAsString()); generator.writeString(label.labelsAsString()); 
generator.writeString(label.toLabelsAsString()); generator.writeEndArray(); } }
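For orientation, here is a hand-written sketch of the overall JSON layout the V1 writer emits: a "graph" array of per-file entries followed by a "warnings" array. The file name, label and property values below are placeholders, and only a single categorical feature entry is shown.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;

import java.io.StringWriter;

public class TrainingConfigShapeSketch {
    public static void main(String[] args) throws Exception {
        StringWriter writer = new StringWriter();
        JsonGenerator generator = new JsonFactory().createGenerator(writer);
        generator.setPrettyPrinter(new DefaultPrettyPrinter());

        generator.writeStartObject();
        generator.writeArrayFieldStart("graph");

        // One per-file entry, as produced for each node output file.
        generator.writeStartObject();
        generator.writeStringField("file_name", "nodes/example.csv"); // placeholder file name
        generator.writeStringField("separator", ",");
        generator.writeArrayFieldStart("features");
        generator.writeStartObject();
        generator.writeStringField("feat_type", "node");
        generator.writeStringField("sub_feat_type", "category");
        generator.writeArrayFieldStart("cols");
        generator.writeString("~id");
        generator.writeString("exampleProperty"); // placeholder property column
        generator.writeEndArray();
        generator.writeStringField("node_type", "ExampleLabel"); // placeholder label
        generator.writeEndObject();
        generator.writeEndArray();
        generator.writeEndObject();

        generator.writeEndArray();
        generator.writeArrayFieldStart("warnings");
        generator.writeEndArray();
        generator.writeEndObject();
        generator.flush();

        System.out.println(writer);
    }
}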
1,076
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigBuilderV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.LabelConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureOverrideConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.NumericalBucketFeatureConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.TrainingDataWriterConfigV1; import com.amazonaws.services.neptune.propertygraph.Label; import java.util.*; public class PropertyGraphTrainingDataConfigBuilderV1 { public static PropertyGraphTrainingDataConfigBuilderV1 builder() { return new PropertyGraphTrainingDataConfigBuilderV1(); } Map<Label, LabelConfigV1> nodeClassLabels = new HashMap<>(); Map<Label, LabelConfigV1> edgeClassLabels = new HashMap<>(); Collection<Word2VecConfig> word2VecNodeFeatures = new ArrayList<>(); Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures = new ArrayList<>(); Collection<FeatureOverrideConfigV1> nodeFeatureOverrides = new ArrayList<>(); Collection<FeatureOverrideConfigV1> edgeFeatureOverrides = new ArrayList<>(); Collection<Double> splitRates = Arrays.asList(0.7, 0.1, 0.2); public PropertyGraphTrainingDataConfigBuilderV1 withNodeClassLabel(Label label, String column) { nodeClassLabels.put(label, new LabelConfigV1("node_class_label", column, splitRates)); return this; } public PropertyGraphTrainingDataConfigBuilderV1 withEdgeClassLabel(Label label, String column) { edgeClassLabels.put(label, new LabelConfigV1("edge_class_label", column, splitRates)); return this; } public PropertyGraphTrainingDataConfigBuilderV1 withWord2VecNodeFeature(Label label, String column, String... languages) { word2VecNodeFeatures.add(new Word2VecConfig(label, column, Arrays.asList(languages))); return this; } public PropertyGraphTrainingDataConfigBuilderV1 withNumericalBucketFeature(Label label, String column, Range range, int bucketCount, int slideWindowSize) { numericalBucketFeatures.add(new NumericalBucketFeatureConfigV1(label, column, range, bucketCount, slideWindowSize)); return this; } public PropertyGraphTrainingDataConfigBuilderV1 withNodeFeatureOverride(FeatureOverrideConfigV1 override) { nodeFeatureOverrides.add(override); return this; } public PropertyGraphTrainingDataConfigBuilderV1 withEdgeFeatureOverride(FeatureOverrideConfigV1 override) { edgeFeatureOverrides.add(override); return this; } public TrainingDataWriterConfigV1 build() { return new TrainingDataWriterConfigV1("training-job", nodeClassLabels, edgeClassLabels, word2VecNodeFeatures, numericalBucketFeatures, nodeFeatureOverrides, edgeFeatureOverrides); } }
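A usage sketch for the builder above. Note that new Label("Person") assumes a single-String Label constructor, which is an assumption made here for illustration rather than something shown in this file.

import com.amazonaws.services.neptune.profiles.neptune_ml.v1.PropertyGraphTrainingDataConfigBuilderV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.TrainingDataWriterConfigV1;
import com.amazonaws.services.neptune.propertygraph.Label;

public class TrainingConfigBuilderExample {
    public static void main(String[] args) {
        // new Label("Person") assumes a single-String constructor; adjust to the actual Label API.
        TrainingDataWriterConfigV1 config = PropertyGraphTrainingDataConfigBuilderV1.builder()
                .withNodeClassLabel(new Label("Person"), "age")
                .withWord2VecNodeFeature(new Label("Person"), "bio", "en_core_web_lg")
                .build();
        System.out.println(config);
    }
}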
1,077
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/FeatureTypeV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonNode;

import java.io.IOException;

public enum FeatureTypeV1 {
    category {
        @Override
        public void validateOverride(JsonNode node, String description, Label label) {
            // Do nothing
        }
    },
    numerical {
        @Override
        public void validateOverride(JsonNode node, String description, Label label) {
            if (node.has("separator")) {
                throw new IllegalArgumentException(String.format(
                        "Invalid 'separator' field for %s for '%s': numerical feature properties cannot contain multiple values.",
                        description, label.fullyQualifiedLabel()));
            }
        }
    },
    word2vec {
        @Override
        public void validateOverride(JsonNode node, String description, Label label) {
            // Do nothing
        }
    },
    bucket_numerical {
        @Override
        public void validateOverride(JsonNode node, String description, Label label) {
            if (node.has("separator")) {
                throw new IllegalArgumentException(String.format(
                        "Invalid 'separator' field for %s for '%s': numerical feature properties cannot contain multiple values.",
                        description, label.fullyQualifiedLabel()));
            }
        }
    };

    public void addTo(JsonGenerator generator) throws IOException {
        generator.writeStringField("sub_feat_type", name());
    }

    public abstract void validateOverride(JsonNode node, String description, Label label);
}
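A small sketch of the override validation above, assuming Label exposes a single-string constructor: a numerical override that carries a 'separator' field is rejected, because numerical feature properties are single-valued in this profile.

import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ValidateOverrideSketch {
    public static void main(String[] args) throws Exception {
        Label person = new Label("Person"); // hypothetical constructor, for illustration only
        // Throws IllegalArgumentException because the override contains a 'separator' field.
        FeatureTypeV1.numerical.validateOverride(
                new ObjectMapper().readTree("{ \"separator\": \";\" }"),
                "node feature",
                person);
    }
}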
1,078
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/NumericalBucketFeatureConfigV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.propertygraph.Label;

public class NumericalBucketFeatureConfigV1 {

    private final Label label;
    private final String property;
    private final Range range;
    private final int bucketCount;
    private final int slideWindowSize;

    public NumericalBucketFeatureConfigV1(Label label, String property, Range range, int bucketCount, int slideWindowSize) {
        this.label = label;
        this.property = property;
        this.range = range;
        this.bucketCount = bucketCount;
        this.slideWindowSize = slideWindowSize;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    public int bucketCount() {
        return bucketCount;
    }

    public int slideWindowSize() {
        return slideWindowSize;
    }

    public Range range() {
        return range;
    }

    @Override
    public String toString() {
        return "NumericalBucketFeatureConfigV1{" +
                "label=" + label +
                ", property='" + property + '\'' +
                ", range=" + range +
                ", bucketCount=" + bucketCount +
                ", slideWindowSize=" + slideWindowSize +
                '}';
    }
}
1,079
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/TrainingDataWriterConfigV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing.ParseLabelsV1; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseSplitRate; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import java.util.*; import java.util.stream.Collectors; public class TrainingDataWriterConfigV1 { public static final Collection<Double> DEFAULT_SPLIT_RATES_V1 = Arrays.asList(0.7, 0.1, 0.2); private static final String DEFAULT_NAME_V1 = "training-job-configuration"; public static Collection<TrainingDataWriterConfigV1> fromJson(JsonNode json) { Collection<TrainingDataWriterConfigV1> results = new ArrayList<>(); if (json.isArray()) { ArrayNode configNodes = (ArrayNode) json; int index = 1; for (JsonNode configNode : configNodes) { results.add(getTrainingJobWriterConfig(configNode, index++)); } } else { results.add(getTrainingJobWriterConfig(json, 1)); } Set<String> names = results.stream().map(TrainingDataWriterConfigV1::name).collect(Collectors.toSet()); if (names.size() < results.size()) { throw new IllegalStateException(String.format("Training job configuration names must be unique: %s", names)); } return results; } private static TrainingDataWriterConfigV1 getTrainingJobWriterConfig(JsonNode json, int index) { Map<Label, LabelConfigV1> nodeClassLabels = new HashMap<>(); Map<Label, LabelConfigV1> edgeClassLabels = new HashMap<>(); Collection<Word2VecConfig> word2VecNodeFeatures = new ArrayList<>(); Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures = new ArrayList<>(); Collection<FeatureOverrideConfigV1> nodeFeatureOverrides = new ArrayList<>(); Collection<FeatureOverrideConfigV1> edgeFeatureOverrides = new ArrayList<>(); Collection<Double> defaultSplitRates = new ParseSplitRate(json, DEFAULT_SPLIT_RATES_V1, new ParsingContext("config")).parseSplitRates(); String name = json.has("name") ? json.get("name").textValue() : index > 1 ? 
String.format("%s-%s", DEFAULT_NAME_V1, index) : DEFAULT_NAME_V1; if (json.has("targets")) { JsonNode labels = json.path("targets"); Collection<JsonNode> labelNodes = new ArrayList<>(); if (labels.isArray()) { labels.forEach(labelNodes::add); } else { labelNodes.add(labels); } ParseLabelsV1 parseLabels = new ParseLabelsV1(labelNodes, defaultSplitRates); parseLabels.validate(); nodeClassLabels.putAll(parseLabels.parseNodeClassLabels()); edgeClassLabels.putAll(parseLabels.parseEdgeClassLabels()); } if (json.has("features")) { JsonNode features = json.path("features"); Collection<JsonNode> featureNodes = new ArrayList<>(); if (features.isArray()) { features.forEach(featureNodes::add); } else { featureNodes.add(features); } ParseFeaturesV1 parseFeatures = new ParseFeaturesV1(featureNodes); parseFeatures.validate(); word2VecNodeFeatures.addAll(parseFeatures.parseWord2VecNodeFeatures()); numericalBucketFeatures.addAll(parseFeatures.parseNumericalBucketFeatures()); nodeFeatureOverrides.addAll(parseFeatures.parseNodeFeatureOverrides()); edgeFeatureOverrides.addAll(parseFeatures.parseEdgeFeatureOverrides()); } return new TrainingDataWriterConfigV1( name, nodeClassLabels, edgeClassLabels, word2VecNodeFeatures, numericalBucketFeatures, nodeFeatureOverrides, edgeFeatureOverrides); } private final String name; private final Map<Label, LabelConfigV1> nodeClassLabels; private final Map<Label, LabelConfigV1> edgeClassLabels; private final Collection<Word2VecConfig> word2VecNodeFeatures; private final Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures; private final Collection<FeatureOverrideConfigV1> nodeFeatureOverrides; private final Collection<FeatureOverrideConfigV1> edgeFeatureOverrides; public TrainingDataWriterConfigV1() { this(DEFAULT_NAME_V1, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); } public TrainingDataWriterConfigV1(String name, Map<Label, LabelConfigV1> nodeClassLabels, Map<Label, LabelConfigV1> edgeClassLabels, Collection<Word2VecConfig> word2VecNodeFeatures, Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures, Collection<FeatureOverrideConfigV1> nodeFeatureOverrides, Collection<FeatureOverrideConfigV1> edgeFeatureOverrides) { this.name = name; this.nodeClassLabels = nodeClassLabels; this.edgeClassLabels = edgeClassLabels; this.word2VecNodeFeatures = word2VecNodeFeatures; this.numericalBucketFeatures = numericalBucketFeatures; this.nodeFeatureOverrides = nodeFeatureOverrides; this.edgeFeatureOverrides = edgeFeatureOverrides; } public boolean hasNodeClassificationSpecificationForNode(Label nodeType) { return nodeClassLabels.containsKey(nodeType); } public LabelConfigV1 getNodeClassificationPropertyForNode(Label nodeType) { return nodeClassLabels.get(nodeType); } public boolean isNodeClassificationPropertyForNode(Label nodeType, String property) { if (hasNodeClassificationSpecificationForNode(nodeType)) { return getNodeClassificationPropertyForNode(nodeType).property().equals(property); } else { return false; } } public boolean hasEdgeClassificationSpecificationForEdge(Label edgeType) { return edgeClassLabels.containsKey(edgeType); } public LabelConfigV1 getEdgeClassificationPropertyForEdge(Label nodeType) { return edgeClassLabels.get(nodeType); } public boolean isEdgeClassificationPropertyForEdge(Label edgeType, String property) { if (hasEdgeClassificationSpecificationForEdge(edgeType)) { return 
getEdgeClassificationPropertyForEdge(edgeType).property().equals(property); } else { return false; } } public boolean hasWord2VecSpecification(Label nodeType, String property) { return getWord2VecSpecification(nodeType, property) != null; } public Word2VecConfig getWord2VecSpecification(Label nodeType, String property) { return word2VecNodeFeatures.stream() .filter(config -> config.label().equals(nodeType) && config.property().equals(property)) .findFirst() .orElse(null); } public boolean hasNumericalBucketSpecification(Label nodeType, String property) { return getNumericalBucketSpecification(nodeType, property) != null; } public NumericalBucketFeatureConfigV1 getNumericalBucketSpecification(Label nodeType, String property) { return numericalBucketFeatures.stream() .filter(config -> config.label().equals(nodeType) && config.property().equals(property)) .findFirst() .orElse(null); } public boolean hasNodeFeatureOverrideForNodeProperty(Label nodeType, String property) { return nodeFeatureOverrides.stream() .anyMatch(override -> override.label().equals(nodeType) && override.properties().contains(property)); } public Collection<FeatureOverrideConfigV1> getNodeFeatureOverrides(Label nodeType) { return nodeFeatureOverrides.stream() .filter(c -> c.label().equals(nodeType)) .collect(Collectors.toList()); } public FeatureOverrideConfigV1 getNodeFeatureOverride(Label nodeType, String property) { return nodeFeatureOverrides.stream() .filter(config -> config.label().equals(nodeType) && config.properties().contains(property)) .findFirst() .orElse(null); } public boolean hasEdgeFeatureOverrideForEdgeProperty(Label edgeType, String property) { return edgeFeatureOverrides.stream() .anyMatch(override -> override.label().equals(edgeType) && override.properties().contains(property)); } public Collection<FeatureOverrideConfigV1> getEdgeFeatureOverrides(Label edgeType) { return edgeFeatureOverrides.stream() .filter(c -> c.label().equals(edgeType)) .collect(Collectors.toList()); } public FeatureOverrideConfigV1 getEdgeFeatureOverride(Label edgeType, String property) { return edgeFeatureOverrides.stream() .filter(config -> config.label().equals(edgeType) && config.properties().contains(property)) .findFirst() .orElse(null); } @Override public String toString() { return "TrainingJobWriterConfig{" + "nodeClassLabels=" + nodeClassLabels + ", edgeClassLabels=" + edgeClassLabels + ", word2VecNodeFeatures=" + word2VecNodeFeatures + ", numericalBucketFeatures=" + numericalBucketFeatures + ", nodeFeatureOverrides=" + nodeFeatureOverrides + ", edgeFeatureOverrides=" + edgeFeatureOverrides + '}'; } public String name() { return name; } }
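A hedged sketch of parsing a v1 profile configuration with fromJson. The 'targets', 'features', 'node', 'property', 'type', 'range' and 'num_buckets' field names come from the parsing classes in this package; the 'range' array shape and the 'slide_window_size' field name are assumptions based on the v2 output format.

import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.TrainingDataWriterConfigV1;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Collection;

public class FromJsonSketch {
    public static void main(String[] args) throws Exception {
        String json = "{" +
                " \"name\": \"my-training-job\"," +
                " \"targets\": [ { \"node\": \"Person\", \"property\": \"occupation\", \"type\": \"classification\" } ]," +
                " \"features\": [ { \"node\": \"Person\", \"property\": \"age\", \"type\": \"bucket_numerical\"," +
                "                   \"range\": [0, 100], \"num_buckets\": 10, \"slide_window_size\": 3 } ]" +
                "}";

        JsonNode config = new ObjectMapper().readTree(json);
        Collection<TrainingDataWriterConfigV1> configs = TrainingDataWriterConfigV1.fromJson(config);
        configs.forEach(c -> System.out.println(c.name())); // prints: my-training-job
    }
}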
1,080
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/LabelConfigV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import java.util.Collection;

public class LabelConfigV1 {

    private final String labelType;
    private final String property;
    private final Collection<Double> splitRates;

    public LabelConfigV1(String labelType, String property, Collection<Double> splitRates) {
        this.labelType = labelType;
        this.property = property;
        this.splitRates = splitRates;
        if (this.splitRates.size() != 3) {
            throw new IllegalArgumentException("split rates must contain 3 values");
        }
        // Sum with a small tolerance so that rates such as (0.8, 0.1, 0.1) are not rejected
        // because of floating-point rounding.
        double sum = this.splitRates.stream().mapToDouble(Double::doubleValue).sum();
        if (Math.abs(sum - 1.0) > 0.000001) {
            throw new IllegalArgumentException("split rate values must add up to 1.0");
        }
    }

    public String property() {
        return property;
    }

    public Collection<Double> splitRates() {
        return splitRates;
    }

    public String labelType() {
        return labelType;
    }
}
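A small sketch of the constraint enforced above: exactly three split-rate values that sum to 1.0 construct successfully; anything else throws.

import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.LabelConfigV1;

import java.util.Arrays;

public class LabelConfigSketch {
    public static void main(String[] args) {
        // Valid: three values summing to 1.0.
        LabelConfigV1 ok = new LabelConfigV1("node_class_label", "occupation", Arrays.asList(0.5, 0.25, 0.25));
        System.out.println(ok.labelType());

        // Throws IllegalArgumentException: only two values supplied.
        new LabelConfigV1("node_class_label", "occupation", Arrays.asList(0.8, 0.2));
    }
}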
1,081
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/FeatureOverrideConfigV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.propertygraph.Label; import java.util.Collection; public class FeatureOverrideConfigV1 { private final Label label; private final Collection<String> properties; private final FeatureTypeV1 featureType; private final Norm norm; private final Separator separator; public FeatureOverrideConfigV1(Label label, Collection<String> properties, FeatureTypeV1 featureType, Norm norm, Separator separator) { this.label = label; this.properties = properties; this.featureType = featureType; this.norm = norm; this.separator = separator; } public Label label() { return label; } public boolean isSinglePropertyOverride() { return properties.size() == 1; } public String firstProperty() { return properties.iterator().next(); } public Collection<String> properties() { return properties; } public FeatureTypeV1 featureType() { return featureType; } public Norm norm() { return norm; } public Separator separator() { return separator; } }
1,082
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseFeaturesV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.*; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureOverrideConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.NumericalBucketFeatureConfigV1; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import java.util.ArrayList; import java.util.Collection; public class ParseFeaturesV1 { private final Collection<JsonNode> nodes; public ParseFeaturesV1(Collection<JsonNode> nodes) { this.nodes = nodes; } public void validate() { for (JsonNode node : nodes) { if (!isWord2VecNodeFeature(node) && !isNumericalBucketFeature(node) && !isNodeFeatureOverride(node) && !isEdgeFeatureOverride(node)) { throw new IllegalArgumentException("Illegal feature element: expected category or numerical feature definitions for nodes and edges, or word2vec or bucket_numerical feature definitions for nodes"); } } } public Collection<Word2VecConfig> parseWord2VecNodeFeatures() { Collection<Word2VecConfig> word2VecFeatures = new ArrayList<>(); for (JsonNode node : nodes) { if (isWord2VecNodeFeature(node)) { String description = "word2vec feature"; ParsingContext context = new ParsingContext(FeatureTypeV1.word2vec.name() + " feature"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); String property = new ParseProperty(node, context.withLabel(nodeType)).parseSingleProperty(); Collection<String> language = new ParseWord2VecLanguage(node).parseLanguage(); Word2VecConfig config = new Word2VecConfig(nodeType, property, language); word2VecFeatures.add(config); } } return word2VecFeatures; } public Collection<NumericalBucketFeatureConfigV1> parseNumericalBucketFeatures() { Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures = new ArrayList<>(); for (JsonNode node : nodes) { if (isNumericalBucketFeature(node)) { String description = FeatureTypeV1.bucket_numerical.name(); ParsingContext context = new ParsingContext(FeatureTypeV1.bucket_numerical.name() + " feature"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); FeatureTypeV1.bucket_numerical.validateOverride(node, description, nodeType); String property = new ParseProperty(node, context.withLabel(nodeType)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(nodeType).withProperty(property); Range range = new ParseRange(node, "range", propertyContext).parseRange(); int bucketCount = new ParseBucketCountV1(node, 
propertyContext).parseBucketCount(); int slideWindowSize = new ParseSlideWindowSize(node, propertyContext).parseSlideWindowSize(); NumericalBucketFeatureConfigV1 config = new NumericalBucketFeatureConfigV1(nodeType, property, range, bucketCount, slideWindowSize); numericalBucketFeatures.add(config); } } return numericalBucketFeatures; } public Collection<FeatureOverrideConfigV1> parseNodeFeatureOverrides() { Collection<FeatureOverrideConfigV1> featureOverrides = new ArrayList<>(); for (JsonNode node : nodes) { if (isNodeFeatureOverride(node)) { String description = "node feature"; ParsingContext context = new ParsingContext("node feature"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); Collection<String> properties = new ParseProperty(node, context.withLabel(nodeType)).parseMultipleProperties(); ParsingContext propertiesContext = context.withLabel(nodeType).withProperties(properties); FeatureTypeV1 type = new ParseFeatureTypeV1(node, propertiesContext).parseFeatureType(); type.validateOverride(node, description, nodeType); Norm norm = new ParseNorm(node, propertiesContext).parseNorm(); Separator separator = new ParseSeparator(node).parseSeparator(); FeatureOverrideConfigV1 config = new FeatureOverrideConfigV1(nodeType, properties, type, norm, separator); featureOverrides.add(config); } } return featureOverrides; } public Collection<FeatureOverrideConfigV1> parseEdgeFeatureOverrides() { Collection<FeatureOverrideConfigV1> featureOverrides = new ArrayList<>(); for (JsonNode node : nodes) { if (isEdgeFeatureOverride(node)) { ParsingContext context = new ParsingContext("edge feature"); String description = "edge feature"; Label edgeType = new ParseEdgeType(node, context).parseEdgeType(); Collection<String> properties = new ParseProperty(node, context.withLabel(edgeType)).parseMultipleProperties(); ParsingContext propertiesContext = context.withLabel(edgeType).withProperties(properties); FeatureTypeV1 type = new ParseFeatureTypeV1(node, propertiesContext).parseFeatureType(); type.validateOverride(node, description, edgeType); Norm norm = new ParseNorm(node, propertiesContext).parseNorm(); Separator separator = new ParseSeparator(node).parseSeparator(); featureOverrides.add(new FeatureOverrideConfigV1(edgeType, properties, type, norm, separator)); } } return featureOverrides; } private boolean isWord2VecNodeFeature(JsonNode node) { return isNodeFeature(node) && isWord2VecType(node.get("type").textValue()); } private boolean isNumericalBucketFeature(JsonNode node) { return isNodeFeature(node) && isBucketNumericalType(node.get("type").textValue()); } private boolean isNodeFeatureOverride(JsonNode node) { if (isNodeFeature(node)) { String type = node.get("type").textValue(); return (isNumericalType(type) || isCategoricalType(type)); } return false; } private boolean isEdgeFeatureOverride(JsonNode node) { if (isEdgeFeature(node)) { String type = node.get("type").textValue(); return (isNumericalType(type) || isCategoricalType(type)); } return false; } private boolean isNodeFeature(JsonNode node) { return node.has("node") && node.has("type"); } private boolean isEdgeFeature(JsonNode node) { return node.has("edge") && node.has("type"); } private boolean isWord2VecType(String type) { return FeatureTypeV1.word2vec.name().equals(type); } private boolean isBucketNumericalType(String type) { return FeatureTypeV1.bucket_numerical.name().equals(type); } private boolean isCategoricalType(String type) { return FeatureTypeV1.category.name().equals(type); } private boolean 
isNumericalType(String type) { return FeatureTypeV1.numerical.name().equals(type); } }
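A hedged sketch of using ParseFeaturesV1 directly, mirroring what TrainingDataWriterConfigV1.getTrainingJobWriterConfig does; the 'language' field name is assumed from the v2 output format.

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing.ParseFeaturesV1;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Collection;
import java.util.Collections;

public class ParseFeaturesSketch {
    public static void main(String[] args) throws Exception {
        JsonNode feature = new ObjectMapper().readTree(
                "{ \"node\": \"Person\", \"property\": \"bio\", \"type\": \"word2vec\", \"language\": [\"en_core_web_lg\"] }");

        ParseFeaturesV1 parseFeatures = new ParseFeaturesV1(Collections.singletonList(feature));
        parseFeatures.validate(); // throws if the element is not a recognized feature definition

        Collection<Word2VecConfig> word2VecFeatures = parseFeatures.parseWord2VecNodeFeatures();
        System.out.println(word2VecFeatures.size()); // 1
    }
}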
1,083
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseFeatureTypeV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Arrays;

public class ParseFeatureTypeV1 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseFeatureTypeV1(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    public FeatureTypeV1 parseFeatureType() {
        if (json.has("type") && json.get("type").isTextual()) {
            String type = json.get("type").textValue();
            if (type.equals("numerical") || type.equals("category")) {
                return FeatureTypeV1.valueOf(type);
            } else {
                throw ErrorMessageHelper.invalidFieldValue("type", type, context, Arrays.asList("numerical", "category"));
            }
        }
        throw ErrorMessageHelper.errorParsingField("type", context, "'numerical' or 'category' value");
    }
}
1,084
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseBucketCountV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

public class ParseBucketCountV1 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseBucketCountV1(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    public int parseBucketCount() {
        if (json.has("num_buckets") && json.path("num_buckets").isInt()) {
            return json.path("num_buckets").asInt();
        } else {
            throw ErrorMessageHelper.errorParsingField("num_buckets", context, "an integer");
        }
    }
}
1,085
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseLabelsV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.LabelConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import java.util.Collection; import java.util.HashMap; import java.util.Map; public class ParseLabelsV1 { private final Collection<JsonNode> nodes; private final Collection<Double> defaultSplitRates; public ParseLabelsV1(Collection<JsonNode> nodes, Collection<Double> defaultSplitRates) { this.nodes = nodes; this.defaultSplitRates = defaultSplitRates; } public Map<Label, LabelConfigV1> parseNodeClassLabels() { Map<Label, LabelConfigV1> nodeClassLabels = new HashMap<>(); for (JsonNode node : nodes) { if (isNodeClass(node)) { ParsingContext context = new ParsingContext("node label"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); String property = new ParseProperty(node, context.withLabel(nodeType)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(nodeType).withProperty(property); String labelType = new ParseLabelTypeV1("node", node, propertyContext).parseLabel(); Collection<Double> splitRates = new ParseSplitRate(node, defaultSplitRates, propertyContext).parseSplitRates(); nodeClassLabels.put(nodeType, new LabelConfigV1(labelType, property, splitRates)); } } return nodeClassLabels; } public Map<Label, LabelConfigV1> parseEdgeClassLabels() { Map<Label, LabelConfigV1> edgeClassLabels = new HashMap<>(); for (JsonNode node : nodes) { if (isEdgeClass(node)) { ParsingContext context = new ParsingContext("edge label"); Label edgeType = new ParseEdgeType(node, context).parseEdgeType(); String property = new ParseProperty(node, context.withLabel(edgeType)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(edgeType).withProperty(property); String labelType = new ParseLabelTypeV1("edge", node, propertyContext).parseLabel(); Collection<Double> splitRates = new ParseSplitRate(node, defaultSplitRates, propertyContext).parseSplitRates(); edgeClassLabels.put(edgeType, new LabelConfigV1(labelType, property, splitRates)); } } return edgeClassLabels; } public void validate() { for (JsonNode node : nodes) { if (!isNodeClass(node) && !isEdgeClass(node)) { throw new IllegalArgumentException("Illegal label element. Expected 'node' or 'edge' field, and a 'property' field."); } } } private boolean isNodeClass(JsonNode node) { return node.has("node") && node.has("property"); } private boolean isEdgeClass(JsonNode node) { return node.has("edge") && node.has("property"); } }
1,086
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseLabelTypeV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Arrays;

public class ParseLabelTypeV1 {

    private final String prefix;
    private final JsonNode json;
    private final ParsingContext context;

    public ParseLabelTypeV1(String prefix, JsonNode json, ParsingContext context) {
        this.prefix = prefix;
        this.json = json;
        this.context = context;
    }

    public String parseLabel() {
        if (json.has("type") && json.get("type").isTextual()) {
            String type = json.get("type").textValue();
            if (type.equals("regression")) {
                return regressionLabel();
            } else if (type.equals("classification")) {
                return classLabel();
            } else {
                throw ErrorMessageHelper.invalidFieldValue("type", type, context, Arrays.asList("classification", "regression"));
            }
        }
        return classLabel();
    }

    private String regressionLabel() {
        return String.format("%s_regression_label", prefix);
    }

    private String classLabel() {
        return String.format("%s_class_label", prefix);
    }
}
1,087
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/PropertyGraphTrainingDataConfigWriterV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2; import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.*; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.commons.lang.StringUtils; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.stream.Collectors; public class PropertyGraphTrainingDataConfigWriterV2 { public static final PropertyName COLUMN_NAME_WITH_DATATYPE = new PropertyName() { @Override public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) { return propertySchema.nameWithDataType(printerOptions.csv().escapeCsvHeaders()); } @Override public String unescaped(PropertySchema propertySchema) { return propertySchema.nameWithDataType(); } }; public static final PropertyName COLUMN_NAME_WITHOUT_DATATYPE = new PropertyName() { @Override public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) { return propertySchema.nameWithoutDataType(printerOptions.csv().escapeCsvHeaders()); } @Override public String unescaped(PropertySchema propertySchema) { return propertySchema.nameWithoutDataType(); } }; private final GraphSchema graphSchema; private final JsonGenerator generator; private final PropertyName propertyName; private final PrinterOptions printerOptions; private final TrainingDataWriterConfigV2 config; private final Collection<String> warnings = new ArrayList<>(); public PropertyGraphTrainingDataConfigWriterV2(GraphSchema graphSchema, JsonGenerator generator, PropertyName propertyName, PrinterOptions printerOptions) { this(graphSchema, generator, propertyName, printerOptions, new TrainingDataWriterConfigV2()); } public PropertyGraphTrainingDataConfigWriterV2(GraphSchema graphSchema, JsonGenerator generator, PropertyName propertyName, PrinterOptions printerOptions, TrainingDataWriterConfigV2 config) { this.graphSchema = graphSchema; this.generator = generator; this.propertyName = propertyName; this.printerOptions = printerOptions; this.config = config; } public void write() throws IOException { write(true); } public void write(boolean includeEdgeFeatures) throws IOException { generator.writeStartObject(); generator.writeStringField("version", "v2.0"); generator.writeStringField("query_engine", "gremlin"); generator.writeObjectFieldStart("graph"); writeNodes(); writeEdges(includeEdgeFeatures); 
generator.writeEndObject(); generator.writeArrayFieldStart("warnings"); writeWarnings(); generator.writeEndArray(); generator.writeEndObject(); generator.flush(); } private void writeWarnings() throws IOException { for (String warning : warnings) { generator.writeString(warning); } } private void writeNodes() throws IOException { GraphElementType graphElementType = GraphElementType.nodes; GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType); generator.writeArrayFieldStart("nodes"); for (Label nodeLabel : graphElementSchemas.labels()) { Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(nodeLabel); LabelSchema labelSchema = graphElementSchemas.getSchemaFor(nodeLabel); for (String outputId : outputIds) { generator.writeStartObject(); writeFileName(graphElementType, outputId); writeCommaSeparator(); writeNodeType(labelSchema); writeNodeFeatures(labelSchema); writeNodeLabels(labelSchema); generator.writeEndObject(); } } generator.writeEndArray(); } private void writeEdges(boolean includeEdgeFeatures) throws IOException { GraphElementType graphElementType = GraphElementType.edges; GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType); generator.writeArrayFieldStart("edges"); for (Label edgeLabel : graphElementSchemas.labels()) { Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(edgeLabel); LabelSchema labelSchema = graphElementSchemas.getSchemaFor(edgeLabel); for (String outputId : outputIds) { generator.writeStartObject(); writeFileName(graphElementType, outputId); writeCommaSeparator(); writeEdgeType(labelSchema); if (includeEdgeFeatures){ writeEdgeFeatures(labelSchema); } writeEdgeLabels(labelSchema); generator.writeEndObject(); } } generator.writeEndArray(); } private void writeNodeType(LabelSchema labelSchema) throws IOException { generator.writeArrayFieldStart("node"); generator.writeString("~id"); generator.writeString(labelSchema.label().labelsAsString()); generator.writeEndArray(); } private void writeEdgeType(LabelSchema labelSchema) throws IOException { generator.writeArrayFieldStart("source"); generator.writeString("~from"); generator.writeString(labelSchema.label().fromLabelsAsString()); generator.writeEndArray(); generator.writeArrayFieldStart("relation"); generator.writeString(""); generator.writeString(labelSchema.label().labelsAsString()); generator.writeEndArray(); generator.writeArrayFieldStart("dest"); generator.writeString("~to"); generator.writeString(labelSchema.label().toLabelsAsString()); generator.writeEndArray(); } private void writeNodeFeatures(LabelSchema labelSchema) throws IOException { Label label = labelSchema.label(); Collection<PropertySchema> propertySchemas = labelSchema.propertySchemas(); generator.writeArrayFieldStart("features"); ElementConfig nodeConfig = config.nodeConfig(); for (PropertySchema propertySchema : propertySchemas) { String column = propertySchema.nameWithoutDataType(); if (nodeConfig.hasClassificationSpecificationForProperty(label, column)) { continue; } if (!config.allowFeatureEncoding()) { writeNoneFeature(propertySchema); } else { if (nodeConfig.allowAutoInferFeature(label, column)) { writeAutoInferredFeature(propertySchema); } if (nodeConfig.hasNoneFeatureSpecification(label, column)) { writeNoneFeature(propertySchema); } if (nodeConfig.hasTfIdfSpecification(label, column)) { writeTfIdfFeature(propertySchema, nodeConfig.getTfIdfSpecification(label, column)); } if (nodeConfig.hasDatetimeSpecification(label, column)) { 
writeDatetimeFeature(propertySchema, nodeConfig.getDatetimeSpecification(label, column)); } if (nodeConfig.hasWord2VecSpecification(label, column)) { writeWord2VecFeature(propertySchema, nodeConfig.getWord2VecSpecification(label, column)); } if (nodeConfig.hasFastTextSpecification(label, column)) { writeFastTextFeature(propertySchema, nodeConfig.getFastTextSpecification(label, column)); } if (nodeConfig.hasSbertSpecification(label, column)) { writeSbertFeature(propertySchema, nodeConfig.getSbertSpecification(label, column)); } if (nodeConfig.hasNumericalBucketSpecification(label, column)) { writeNumericalBucketFeature(propertySchema, nodeConfig.getNumericalBucketSpecification(label, column)); } } } for (FeatureOverrideConfigV2 featureOverride : nodeConfig.getFeatureOverrides(label)) { writeFeatureOverride(labelSchema, featureOverride, nodeConfig); } generator.writeEndArray(); } private void writeEdgeFeatures(LabelSchema labelSchema) throws IOException { Label label = labelSchema.label(); Collection<PropertySchema> propertySchemas = labelSchema.propertySchemas(); generator.writeArrayFieldStart("features"); ElementConfig edgeConfig = config.edgeConfig(); for (PropertySchema propertySchema : propertySchemas) { String column = propertySchema.nameWithoutDataType(); if (edgeConfig.hasClassificationSpecificationForProperty(label, column)) { continue; } if (!config.allowFeatureEncoding()) { writeNoneFeature(propertySchema); } else { if (edgeConfig.allowAutoInferFeature(label, column)) { writeAutoInferredFeature(propertySchema); } if (edgeConfig.hasNoneFeatureSpecification(label, column)) { writeNoneFeature(propertySchema); } if (edgeConfig.hasTfIdfSpecification(label, column)) { writeTfIdfFeature(propertySchema, edgeConfig.getTfIdfSpecification(label, column)); } if (edgeConfig.hasDatetimeSpecification(label, column)) { writeDatetimeFeature(propertySchema, edgeConfig.getDatetimeSpecification(label, column)); } if (edgeConfig.hasWord2VecSpecification(label, column)) { writeWord2VecFeature(propertySchema, edgeConfig.getWord2VecSpecification(label, column)); } if (edgeConfig.hasFastTextSpecification(label, column)) { writeFastTextFeature(propertySchema, edgeConfig.getFastTextSpecification(label, column)); } if (edgeConfig.hasSbertSpecification(label, column)) { writeSbertFeature(propertySchema, edgeConfig.getSbertSpecification(label, column)); } if (edgeConfig.hasNumericalBucketSpecification(label, column)) { writeNumericalBucketFeature(propertySchema, edgeConfig.getNumericalBucketSpecification(label, column)); } } } for (FeatureOverrideConfigV2 featureOverride : edgeConfig.getFeatureOverrides(label)) { writeFeatureOverride(labelSchema, featureOverride, edgeConfig); } generator.writeEndArray(); } private void writeNodeLabels(LabelSchema labelSchema) throws IOException { Label label = labelSchema.label(); ElementConfig nodeConfig = config.nodeConfig(); if (nodeConfig.hasClassificationSpecificationsFor(label)) { generator.writeArrayFieldStart("labels"); for (LabelConfigV2 labelConfig : nodeConfig.getClassificationSpecifications(label)) { if (labelSchema.containsProperty(labelConfig.property())) { PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property()); writeLabel(propertySchema, labelConfig); } else { ParsingContext context = new ParsingContext("node classification property").withLabel(label).withProperty(labelConfig.property()); warnings.add(String.format("Unrecognized %s.", context)); } } generator.writeEndArray(); } } private void writeLabel(PropertySchema 
propertySchema, LabelConfigV2 labelConfig) throws IOException { generator.writeStartObject(); generator.writeArrayFieldStart("label"); generator.writeString(labelConfig.property()); generator.writeString(labelConfig.taskType()); generator.writeEndArray(); writeSplitRates(labelConfig); labelConfig.separator().writeTo(generator, propertySchema.isMultiValue()); generator.writeEndObject(); } private void writeEdgeLabels(LabelSchema labelSchema) throws IOException { Label label = labelSchema.label(); ElementConfig edgeConfig = config.edgeConfig(); if (edgeConfig.hasClassificationSpecificationsFor(label)) { generator.writeArrayFieldStart("labels"); for (LabelConfigV2 labelConfig : edgeConfig.getClassificationSpecifications(label)) { if (StringUtils.isEmpty(labelConfig.property())) { writeLabel(new PropertySchema(""), labelConfig); } else if (labelSchema.containsProperty(labelConfig.property())) { PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property()); writeLabel(propertySchema, labelConfig); } else { ParsingContext context = new ParsingContext("edge classification property").withLabel(label).withProperty(labelConfig.property()); warnings.add(String.format("Unrecognized %s.", context)); } } generator.writeEndArray(); } } private void writeSplitRates(LabelConfigV2 labelConfig) throws IOException { generator.writeArrayFieldStart("split_rate"); for (Double rate : labelConfig.splitRates()) { generator.writeNumber(rate); } generator.writeEndArray(); } private void writeFeatureOverride(LabelSchema labelSchema, FeatureOverrideConfigV2 featureOverride, ElementConfig elementConfig) throws IOException { FeatureTypeV2 featureType = featureOverride.featureType(); Label label = labelSchema.label(); Collection<PropertySchema> propertySchemas = labelSchema.propertySchemas().stream() .filter(p -> featureOverride.properties().contains(p.nameWithoutDataType()) && !elementConfig.hasClassificationSpecificationForProperty(label, p.nameWithoutDataType())) .collect(Collectors.toList()); Collection<String> propertyNames = propertySchemas.stream() .map(PropertySchema::nameWithoutDataType) .collect(Collectors.toList()); Collection<String> missingProperties = featureOverride.properties().stream() .filter(p -> !propertyNames.contains(p)) .collect(Collectors.toList()); for (String missingProperty : missingProperties) { ParsingContext context = new ParsingContext(featureType.name() + " feature override").withLabel(label).withProperty(missingProperty); warnings.add(String.format("Unable to add %s. 
Property is missing, or is being used to label the node.", context)); } if (FeatureTypeV2.category == featureType) { writeCategoricalFeature(propertySchemas, featureOverride); } else if (FeatureTypeV2.numerical == featureType) { writeNumericalFeature(propertySchemas, featureOverride); } else if (FeatureTypeV2.auto == featureType) { writeAutoFeature(propertySchemas, featureOverride); } else if (FeatureTypeV2.none == featureType) { // Do nothing } else { warnings.add(String.format("Unsupported feature type override for node: %s.", featureType.name())); } } private void writeAutoInferredFeature(PropertySchema propertySchema) throws IOException { if (propertySchema.dataType() == DataType.String || propertySchema.dataType() == DataType.Boolean) { writeAutoFeature(Collections.singletonList(propertySchema), ImputerTypeV2.none); } if (propertySchema.dataType() == DataType.Byte || propertySchema.dataType() == DataType.Short || propertySchema.dataType() == DataType.Integer || propertySchema.dataType() == DataType.Long || propertySchema.dataType() == DataType.Float || propertySchema.dataType() == DataType.Double) { if (propertySchema.isMultiValue()) { writeAutoFeature(Collections.singletonList(propertySchema), ImputerTypeV2.median); } else { writeNumericalFeature( Collections.singletonList(propertySchema), Norm.min_max, ImputerTypeV2.median); } } if (propertySchema.dataType() == DataType.Date) { writeDatetimeFeature( Collections.singletonList(propertySchema), Arrays.asList( DatetimePartV2.year, DatetimePartV2.month, DatetimePartV2.weekday, DatetimePartV2.hour)); } } private void writeFeature(PropertySchema propertySchema, FeatureTypeV2 featureType) throws IOException { writeFeature(propertySchema, featureType.name()); } private void writeFeature(PropertySchema propertySchema, String featureType) throws IOException { generator.writeArrayFieldStart("feature"); generator.writeString(propertyName.escaped(propertySchema, printerOptions)); // column generator.writeString(propertyName.escaped(propertySchema, printerOptions)); // feature name generator.writeString(featureType); generator.writeEndArray(); } private void writeTfIdfFeature(PropertySchema propertySchema, TfIdfConfigV2 tfIdfSpecification) throws IOException { if (propertySchema.isMultiValue()) { warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.", FeatureTypeV2.text_tfidf, propertySchema.nameWithoutDataType())); writeAutoInferredFeature(propertySchema); return; } generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.text_tfidf); Range range = tfIdfSpecification.ngramRange(); if (range != null) { generator.writeArrayFieldStart("ngram_range"); generator.writeObject(range.low()); generator.writeObject(range.high()); generator.writeEndArray(); } Integer minDf = tfIdfSpecification.minDf(); if (minDf != null) { generator.writeNumberField("min_df", minDf); } Integer maxFeatures = tfIdfSpecification.maxFeatures(); if (maxFeatures != null) { generator.writeNumberField("max_features", maxFeatures); } generator.writeEndObject(); } private void writeNumericalBucketFeature(PropertySchema propertySchema, NumericalBucketFeatureConfigV2 numericalBucketSpecification) throws IOException { if (propertySchema.isMultiValue()) { warnings.add(String.format("%s feature does not support multi-value properties. 
Auto-inferring a feature for '%s'.", FeatureTypeV2.bucket_numerical, propertySchema.nameWithoutDataType())); writeAutoInferredFeature(propertySchema); return; } generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.bucket_numerical); Range range = numericalBucketSpecification.range(); if (range != null) { generator.writeArrayFieldStart("range"); generator.writeObject(range.low()); generator.writeObject(range.high()); generator.writeEndArray(); } Integer bucketCount = numericalBucketSpecification.bucketCount(); if (bucketCount != null) { generator.writeNumberField("bucket_cnt", bucketCount); } Integer slideWindowSize = numericalBucketSpecification.slideWindowSize(); if (slideWindowSize != null) { generator.writeNumberField("slide_window_size", slideWindowSize); } ImputerTypeV2 imputer = numericalBucketSpecification.imputerType(); if (imputer != null && imputer != ImputerTypeV2.none) { generator.writeStringField("imputer", imputer.formattedName()); } else { warnings.add(String.format("'imputer' value missing for %s feature for '%s'. Preprocessing will exit when it encounters a missing value.", FeatureTypeV2.bucket_numerical, propertySchema.nameWithoutDataType())); } generator.writeEndObject(); } private void writeWord2VecFeature(PropertySchema propertySchema, Word2VecConfig word2VecSpecification) throws IOException { if (propertySchema.isMultiValue()) { warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.", FeatureTypeV2.text_word2vec, propertySchema.nameWithoutDataType())); writeAutoInferredFeature(propertySchema); return; } generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.text_word2vec); if (!word2VecSpecification.languages().isEmpty()) { generator.writeArrayFieldStart("language"); for (String language : word2VecSpecification.languages()) { generator.writeString(language); try { SupportedWord2VecLanguages.valueOf(language); } catch (IllegalArgumentException e) { warnings.add(String.format("Unsupported language for text_word2vec feature for '%s': '%s'. " + "Supported languages are: %s. " + "The output embedding is not guaranteed to be valid if you supply another language.", propertySchema.nameWithoutDataType(), language, ErrorMessageHelper.quoteList(Arrays.stream(SupportedWord2VecLanguages.values()).map(Enum::name).collect(Collectors.toList())))); } } generator.writeEndArray(); } generator.writeEndObject(); } private void writeFastTextFeature(PropertySchema propertySchema, FastTextConfig fastTextConfig) throws IOException { if (propertySchema.isMultiValue()) { warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.", FeatureTypeV2.text_fasttext, propertySchema.nameWithoutDataType())); writeAutoInferredFeature(propertySchema); return; } generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.text_fasttext); String language = fastTextConfig.language(); try { SupportedFastTextLanguages.valueOf(language); } catch (IllegalArgumentException e) { warnings.add(String.format("Unsupported language for text_fasttext feature for '%s': '%s'. " + "Supported languages are: %s. 
" + "The output embedding is not guaranteed to be valid if you supply another language.", propertySchema.nameWithoutDataType(), language, ErrorMessageHelper.quoteList(Arrays.stream(SupportedFastTextLanguages.values()).map(Enum::name).collect(Collectors.toList())))); } generator.writeStringField("language", language); if (fastTextConfig.maxLength().isPresent()){ generator.writeNumberField("max_length", fastTextConfig.maxLength().get()); } generator.writeEndObject(); } private void writeSbertFeature(PropertySchema propertySchema, SbertConfig sbertConfig) throws IOException { if (propertySchema.isMultiValue()) { warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.", FeatureTypeV2.text_sbert, propertySchema.nameWithoutDataType())); writeAutoInferredFeature(propertySchema); return; } generator.writeStartObject(); writeFeature(propertySchema, sbertConfig.name()); generator.writeEndObject(); } private void writeNoneFeature(PropertySchema propertySchema) throws IOException { writeNoneFeature(Collections.singletonList(propertySchema)); } private void writeNoneFeature(Collection<PropertySchema> propertySchemas) throws IOException { for (PropertySchema propertySchema : propertySchemas) { generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.none); generator.writeEndObject(); } } private void writeDatetimeFeature(PropertySchema propertySchema, DatetimeConfigV2 datetimeConfig) throws IOException { writeDatetimeFeature(Collections.singletonList(propertySchema), datetimeConfig.datetimeParts()); } private void writeDatetimeFeature(Collection<PropertySchema> propertySchemas, Collection<DatetimePartV2> datetimeParts) throws IOException { for (PropertySchema propertySchema : propertySchemas) { if (propertySchema.isMultiValue()) { warnings.add(String.format("Unable to add datetime feature for '%s'. Multi-value datetime features not currently supported. 
Adding an auto feature instead.", propertySchema.nameWithoutDataType())); writeAutoFeature(Collections.singletonList(propertySchema), ImputerTypeV2.none); return; } generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.datetime); if (!datetimeParts.isEmpty()) { generator.writeArrayFieldStart("datetime_parts"); for (DatetimePartV2 datetimePart : datetimeParts) { generator.writeString(datetimePart.name()); } generator.writeEndArray(); } generator.writeEndObject(); } } private void writeNumericalFeature(Collection<PropertySchema> propertySchemas, FeatureOverrideConfigV2 featureOverride) throws IOException { writeNumericalFeature(propertySchemas, featureOverride.norm(), featureOverride.imputer(), featureOverride.separator()); } private void writeNumericalFeature(Collection<PropertySchema> propertySchemas, Norm norm, ImputerTypeV2 imputer) throws IOException { writeNumericalFeature(propertySchemas, norm, imputer, new Separator()); } private void writeNumericalFeature(Collection<PropertySchema> propertySchemas, Norm norm, ImputerTypeV2 imputer, Separator separator) throws IOException { for (PropertySchema propertySchema : propertySchemas) { generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.numerical); separator.writeTo(generator, propertySchema.isMultiValue()); generator.writeStringField("norm", norm.formattedName()); if (imputer != ImputerTypeV2.none) { generator.writeStringField("imputer", imputer.formattedName()); } generator.writeEndObject(); } } private void writeCategoricalFeature(Collection<PropertySchema> propertySchemas, FeatureOverrideConfigV2 featureOverride) throws IOException { for (PropertySchema propertySchema : propertySchemas) { generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.category); featureOverride.separator().writeTo(generator, propertySchema.isMultiValue()); generator.writeEndObject(); } } private void writeAutoFeature(Collection<PropertySchema> propertySchemas, FeatureOverrideConfigV2 featureOverride) throws IOException { writeAutoFeature(propertySchemas, featureOverride.imputer(), featureOverride.separator()); } private void writeAutoFeature(Collection<PropertySchema> propertySchemas, ImputerTypeV2 imputer) throws IOException { writeAutoFeature(propertySchemas, imputer, new Separator()); } private void writeAutoFeature(Collection<PropertySchema> propertySchemas, ImputerTypeV2 imputer, Separator separator) throws IOException { for (PropertySchema propertySchema : propertySchemas) { generator.writeStartObject(); writeFeature(propertySchema, FeatureTypeV2.auto); separator.writeTo(generator, propertySchema.isMultiValue()); if (imputer != ImputerTypeV2.none) { generator.writeStringField("imputer", imputer.formattedName()); } generator.writeEndObject(); } } private void writeFileName(GraphElementType graphElementType, String outputId) throws IOException { generator.writeStringField("file_name", String.format("%s/%s", graphElementType.name(), new File(outputId).getName())); } private void writeCommaSeparator() throws IOException { generator.writeStringField("separator", ","); } }
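A hedged sketch of driving the v2 writer above with a Jackson JsonGenerator. GraphSchema, PrinterOptions and TrainingDataWriterConfigV2 are taken as parameters because their construction is not shown in this file.

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.PropertyGraphTrainingDataConfigWriterV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;

public class TrainingDataConfigWriterSketch {
    public static String writeConfig(GraphSchema graphSchema,
                                     PrinterOptions printerOptions,
                                     TrainingDataWriterConfigV2 config) throws IOException {
        StringWriter out = new StringWriter();
        JsonGenerator generator = new JsonFactory().createGenerator(out).useDefaultPrettyPrinter();

        new PropertyGraphTrainingDataConfigWriterV2(
                graphSchema,
                generator,
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                printerOptions,
                config)
                .write(); // writes the v2 training data configuration JSON, including any warnings

        generator.close();
        return out.toString();
    }
}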
1,088
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/RdfTrainingDataConfigWriter.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.EdgeTaskTypeV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.LabelConfigV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.RdfTaskTypeV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.commons.lang.StringUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.stream.Collectors; public class RdfTrainingDataConfigWriter { private final Collection<String> filenames; private final JsonGenerator generator; private final TrainingDataWriterConfigV2 config; private final Collection<String> warnings = new ArrayList<>(); public RdfTrainingDataConfigWriter(Collection<String> filenames, JsonGenerator generator, TrainingDataWriterConfigV2 config) { this.filenames = filenames; this.generator = generator; this.config = config; } public void write() throws IOException { generator.writeStartObject(); generator.writeStringField("version", "v2.0"); generator.writeStringField("query_engine", "sparql"); generator.writeObjectFieldStart("graph"); writeRdfs(); generator.writeEndObject(); generator.writeArrayFieldStart("warnings"); writeWarnings(); generator.writeEndArray(); generator.writeEndObject(); generator.flush(); } private void writeWarnings() throws IOException { for (String warning : warnings) { generator.writeString(warning); } } private void writeRdfs() throws IOException { generator.writeArrayFieldStart("rdfs"); Collection<LabelConfigV2> classificationSpecifications = config.nodeConfig().getAllClassificationSpecifications(); if (classificationSpecifications.isEmpty()) { for (String filename : filenames) { generator.writeStartObject(); generator.writeStringField("file_name", filename); generator.writeObjectFieldStart("label"); generator.writeStringField("task_type", EdgeTaskTypeV2.link_prediction.name()); generator.writeArrayFieldStart("targets"); generator.writeStartObject(); generator.writeArrayFieldStart("split_rate"); for (Double splitRate : config.defaultSplitRates()) { generator.writeNumber(splitRate); } generator.writeEndArray(); generator.writeEndObject(); generator.writeEndArray(); generator.writeEndObject(); generator.writeEndObject(); } } else { for (RdfTaskTypeV2 taskType : RdfTaskTypeV2.values()) { List<LabelConfigV2> taskSpecificConfigs = classificationSpecifications.stream().filter(c -> c.taskType().equals(taskType.name())).collect(Collectors.toList()); if (taskSpecificConfigs.isEmpty()) { continue; } if (taskType == RdfTaskTypeV2.link_prediction) { for (String filename : filenames) { generator.writeStartObject(); generator.writeStringField("file_name", filename); generator.writeObjectFieldStart("label"); generator.writeStringField("task_type", taskType.name()); 
generator.writeArrayFieldStart("targets"); for (LabelConfigV2 taskSpecificConfig : taskSpecificConfigs) { generator.writeStartObject(); if (StringUtils.isNotEmpty(taskSpecificConfig.subject())) { generator.writeStringField("subject", taskSpecificConfig.subject()); } else { warnings.add("'subject' field is missing for link_prediction task, so all edges will be treated as the training target."); } if (StringUtils.isNotEmpty(taskSpecificConfig.property())) { generator.writeStringField("predicate", taskSpecificConfig.property()); }else { warnings.add("'predicate' field is missing for link_prediction task, so all edges will be treated as the training target."); } if (StringUtils.isNotEmpty(taskSpecificConfig.object())) { generator.writeStringField("object", taskSpecificConfig.object()); }else { warnings.add("'object' field is missing for link_prediction task, so all edges will be treated as the training target."); } generator.writeArrayFieldStart("split_rate"); for (Double splitRate : taskSpecificConfig.splitRates()) { generator.writeNumber(splitRate); } generator.writeEndArray(); generator.writeEndObject(); } generator.writeEndArray(); generator.writeEndObject(); generator.writeEndObject(); } } else { for (String filename : filenames) { generator.writeStartObject(); generator.writeStringField("file_name", filename); generator.writeObjectFieldStart("label"); generator.writeStringField("task_type", taskType.name()); generator.writeArrayFieldStart("targets"); for (LabelConfigV2 taskSpecificConfig : taskSpecificConfigs) { generator.writeStartObject(); generator.writeStringField("node", taskSpecificConfig.label().labelsAsString()); String property = taskSpecificConfig.property(); if (StringUtils.isNotEmpty(property)){ generator.writeStringField("predicate", property); } else { warnings.add(String.format("'predicate' field is missing for %s task. If the target nodes have more than one predicate defining the target node feature, the training task will fail with an error.", taskType)); } generator.writeArrayFieldStart("split_rate"); for (Double splitRate : taskSpecificConfig.splitRates()) { generator.writeNumber(splitRate); } generator.writeEndArray(); generator.writeEndObject(); } generator.writeEndArray(); generator.writeEndObject(); generator.writeEndObject(); } } } } generator.writeEndArray(); } }
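A hedged usage sketch for RdfTrainingDataConfigWriter above. It assumes the default TrainingDataWriterConfigV2 (whose empty node configuration yields no classification specifications, so the link_prediction fallback branch runs) and uses Jackson's JsonGenerator, which the writer already depends on; the export file name is illustrative.

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.RdfTrainingDataConfigWriter;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.StringWriter;
import java.util.Arrays;

public class RdfTrainingDataConfigWriterSketch {
    public static void main(String[] args) throws Exception {
        StringWriter output = new StringWriter();
        JsonGenerator generator = new JsonFactory().createGenerator(output);
        // With no classification targets configured, the writer emits a
        // link_prediction label with the default split rates for each file.
        new RdfTrainingDataConfigWriter(
                Arrays.asList("statements/statements-1.nq"), // illustrative export file name
                generator,
                new TrainingDataWriterConfigV2()).write();
        System.out.println(output);
    }
}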
1,089
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/RdfTaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.propertygraph.Label; public enum RdfTaskTypeV2 { classification, regression, link_prediction; public void validate(String predicate, Label label) { // Validation is intentionally a no-op for RDF task types: a missing 'predicate' is surfaced as a warning by RdfTrainingDataConfigWriter instead of failing here. The commented-out checks below show a stricter validation path that is currently disabled. // ParsingContext context = new ParsingContext(String.format("node %s specification", name()), NeptuneMLSourceDataModel.RDF).withLabel(label); // if (StringUtils.isEmpty(predicate)) { // throw new IllegalArgumentException(String.format("Missing or empty 'predicate' field for %s.", context)); // } } }
1,090
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/ImputerTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import java.util.ArrayList; import java.util.Collection; public enum ImputerTypeV2 { mean, median, most_frequent{ @Override public String formattedName() { return "most-frequent"; } }, none{ @Override public boolean isPublic(){ return false; } }; public String formattedName() { return name(); } public boolean isPublic() { return true; } @Override public String toString() { return formattedName(); } public static ImputerTypeV2 fromString(String s) { for (ImputerTypeV2 imputerType : ImputerTypeV2.values()) { if (imputerType.formattedName().equals(s)) { return imputerType; } } throw new IllegalArgumentException(String.format("Invalid imputer type: %s (valid types are: %s)", s, String.join(", ", publicFormattedNames()))); } public static Collection<String> publicFormattedNames() { Collection<String> results = new ArrayList<>(); for (ImputerTypeV2 imputerType : ImputerTypeV2.values()) { if (imputerType.isPublic()){ results.add(imputerType.formattedName()); } } return results; } }
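A small sketch exercising the enum above: parsing the hyphenated public spelling back to its constant, and listing the names advertised to users (none is internal, so it is excluded).

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.ImputerTypeV2;

public class ImputerTypeV2Sketch {
    public static void main(String[] args) {
        // The hyphenated public spelling maps back to the most_frequent constant.
        ImputerTypeV2 imputer = ImputerTypeV2.fromString("most-frequent");
        System.out.println(imputer.name());          // most_frequent
        System.out.println(imputer.formattedName()); // most-frequent

        // 'none' is not public, so it is omitted from the advertised names.
        System.out.println(ImputerTypeV2.publicFormattedNames()); // [mean, median, most-frequent]
    }
}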
1,091
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/FeatureOverrideConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.propertygraph.Label; import java.util.Collection; public class FeatureOverrideConfigV2 { private final Label label; private final Collection<String> properties; private final FeatureTypeV2 featureType; private final Norm norm; private final Separator separator; private final ImputerTypeV2 imputerType; public FeatureOverrideConfigV2(Label label, Collection<String> properties, FeatureTypeV2 featureType, Norm norm, Separator separator, ImputerTypeV2 imputerType) { this.label = label; this.properties = properties; this.featureType = featureType; this.norm = norm; this.separator = separator; this.imputerType = imputerType; } public Label label() { return label; } public boolean isSinglePropertyOverride() { return properties.size() == 1; } public String firstProperty() { return properties.iterator().next(); } public Collection<String> properties() { return properties; } public FeatureTypeV2 featureType() { return featureType; } public Norm norm() { return norm; } public Separator separator() { return separator; } public ImputerTypeV2 imputer() { return imputerType; } }
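A hedged construction sketch for the override config above. The label, norm, and separator arguments are passed as null purely for illustration (ParseFeaturesV2 normally supplies them), and the property name is made up.

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.FeatureOverrideConfigV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.FeatureTypeV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.ImputerTypeV2;
import java.util.Collections;

public class FeatureOverrideConfigV2Sketch {
    public static void main(String[] args) {
        // null label/norm/separator are placeholders; the feature parser normally builds these.
        FeatureOverrideConfigV2 override = new FeatureOverrideConfigV2(
                null, Collections.singletonList("age"), FeatureTypeV2.numerical, null, null, ImputerTypeV2.median);
        System.out.println(override.isSinglePropertyOverride()); // true
        System.out.println(override.firstProperty());            // age
        System.out.println(override.imputer());                  // median (ImputerTypeV2.toString uses formattedName)
    }
}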
1,092
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/DatetimeConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.propertygraph.Label; import java.util.Collection; public class DatetimeConfigV2 { private final Label label; private final String property; private final Collection<DatetimePartV2> datetimeParts; public DatetimeConfigV2(Label label, String property, Collection<DatetimePartV2> datetimeParts) { this.label = label; this.property = property; this.datetimeParts = datetimeParts; } public Label label() { return label; } public String property() { return property; } public Collection<DatetimePartV2> datetimeParts() { return datetimeParts; } @Override public String toString() { return "DatetimeConfigV2{" + "label=" + label + ", property='" + property + '\'' + ", datetimeParts=" + datetimeParts + '}'; } }
1,093
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/DatetimePartV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; public enum DatetimePartV2 { hour, weekday, month, year }
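A minimal sketch of how the datetime parts above end up serialized, mirroring the datetime_parts array written by the property-graph training-data writer earlier in this document; only Jackson is assumed.

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.DatetimePartV2;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.StringWriter;

public class DatetimePartsSketch {
    public static void main(String[] args) throws Exception {
        StringWriter writer = new StringWriter();
        JsonGenerator generator = new JsonFactory().createGenerator(writer);
        generator.writeStartObject();
        generator.writeArrayFieldStart("datetime_parts");
        for (DatetimePartV2 part : DatetimePartV2.values()) {
            generator.writeString(part.name()); // hour, weekday, month, year
        }
        generator.writeEndArray();
        generator.writeEndObject();
        generator.flush();
        System.out.println(writer); // {"datetime_parts":["hour","weekday","month","year"]}
    }
}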
1,094
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/LabelConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.propertygraph.Label; import java.util.Collection; public class LabelConfigV2 { private final Label nodeType; private final String taskType; private final String property; private final String subject; private final String object; private final Collection<Double> splitRates; private final Separator separator; public LabelConfigV2(Label nodeType, String taskType, String property, String subject, String object, Collection<Double> splitRates, Separator separator) { this.nodeType = nodeType; this.taskType = taskType; this.property = property; this.subject = subject; this.object = object; this.splitRates = splitRates; this.separator = separator; } public Label label() { return nodeType; } public String taskType() { return taskType; } public String property() { return property; } public String subject() { return subject; } public String object() { return object; } public Collection<Double> splitRates() { return splitRates; } public Separator separator() { return separator; } }
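A hedged sketch of how the RDF writer earlier in this document consumes a link_prediction label config: empty subject/predicate/object fields trigger the "treated as the training target" warnings. The label and separator arguments are null placeholders and the predicate IRI is made up.

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.LabelConfigV2;
import org.apache.commons.lang.StringUtils;
import java.util.Arrays;

public class LabelConfigV2Sketch {
    public static void main(String[] args) {
        // null label/separator are placeholders; only the RDF target fields matter here.
        LabelConfigV2 target = new LabelConfigV2(
                null, "link_prediction",
                "http://example.org/knows", // 'property' field, written as "predicate"
                null,                       // subject omitted
                null,                       // object omitted
                Arrays.asList(0.8, 0.1, 0.1), null);
        // Mirrors the checks in RdfTrainingDataConfigWriter: missing fields widen the target.
        System.out.println(StringUtils.isNotEmpty(target.subject()));  // false -> warning, all subjects targeted
        System.out.println(StringUtils.isNotEmpty(target.property())); // true  -> written as "predicate"
        System.out.println(target.splitRates());                       // [0.8, 0.1, 0.1]
    }
}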
1,095
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TfIdfConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range; import com.amazonaws.services.neptune.propertygraph.Label; public class TfIdfConfigV2 { private final Label label; private final String property; private final Range ngramRange; private final int minDf; private final int maxFeatures; public TfIdfConfigV2(Label label, String property, Range ngramRange, int minDf, int maxFeatures) { this.label = label; this.property = property; this.ngramRange = ngramRange; this.minDf = minDf; this.maxFeatures = maxFeatures; } public Label label() { return label; } public String property() { return property; } public Range ngramRange() { return ngramRange; } public Integer minDf() { return minDf; } public Integer maxFeatures() { return maxFeatures; } @Override public String toString() { return "TfIdfConfigV2{" + "label=" + label + ", property='" + property + '\'' + ", ngramRange=" + ngramRange + ", minDf=" + minDf + ", maxFeatures=" + maxFeatures + '}'; } }
1,096
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TrainingDataWriterConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseSplitRate; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseFeaturesV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseLabelsV2; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Set; import java.util.stream.Collectors; public class TrainingDataWriterConfigV2 { public static final Collection<Double> DEFAULT_SPLIT_RATES_V2 = Arrays.asList(0.9, 0.1, 0.0); private static final String DEFAULT_NAME_V2 = "training-data-configuration"; public static Collection<TrainingDataWriterConfigV2> fromJson(JsonNode json, NeptuneMLSourceDataModel dataModel) { Collection<TrainingDataWriterConfigV2> results = new ArrayList<>(); if (json.isArray()) { ArrayNode configNodes = (ArrayNode) json; int index = 1; for (JsonNode configNode : configNodes) { results.add(getTrainingJobWriterConfig(configNode, index++, dataModel)); } } else { if (json.has("jobs")) { ArrayNode configNodes = (ArrayNode) json.get("jobs"); int index = 1; for (JsonNode configNode : configNodes) { results.add(getTrainingJobWriterConfig(configNode, index++, dataModel)); } } else { results.add(getTrainingJobWriterConfig(json, 1, dataModel)); } } Set<String> names = results.stream().map(TrainingDataWriterConfigV2::name).collect(Collectors.toSet()); if (names.size() < results.size()) { throw new IllegalStateException(String.format("Training data configuration names must be unique: %s", names)); } return results; } private static TrainingDataWriterConfigV2 getTrainingJobWriterConfig(JsonNode json, int index, NeptuneMLSourceDataModel dataModel) { Collection<Double> defaultSplitRates = new ParseSplitRate(json, DEFAULT_SPLIT_RATES_V2, new ParsingContext("config")).parseSplitRates(); Collection<LabelConfigV2> nodeClassLabels = new ArrayList<>(); Collection<LabelConfigV2> edgeClassLabels = new ArrayList<>(); Collection<NoneFeatureConfig> noneNodeFeatures = new ArrayList<>(); Collection<TfIdfConfigV2> tfIdfNodeFeatures = new ArrayList<>(); Collection<DatetimeConfigV2> datetimeNodeFeatures = new ArrayList<>(); Collection<Word2VecConfig> word2VecNodeFeatures = new ArrayList<>(); Collection<FastTextConfig> fastTextNodeFeatures = new ArrayList<>(); Collection<SbertConfig> sbertNodeFeatures = new ArrayList<>(); Collection<NumericalBucketFeatureConfigV2> numericalBucketNodeFeatures = new ArrayList<>(); Collection<NoneFeatureConfig> noneEdgeFeatures = new ArrayList<>(); Collection<TfIdfConfigV2> tfIdfEdgeFeatures = new 
ArrayList<>(); Collection<DatetimeConfigV2> datetimeEdgeFeatures = new ArrayList<>(); Collection<Word2VecConfig> word2VecEdgeFeatures = new ArrayList<>(); Collection<FastTextConfig> fastTextEdgeFeatures = new ArrayList<>(); Collection<SbertConfig> sbertEdgeFeatures = new ArrayList<>(); Collection<NumericalBucketFeatureConfigV2> numericalBucketEdgeFeatures = new ArrayList<>(); Collection<FeatureOverrideConfigV2> nodeFeatureOverrides = new ArrayList<>(); Collection<FeatureOverrideConfigV2> edgeFeatureOverrides = new ArrayList<>(); String name = json.has("name") ? json.get("name").textValue() : index > 1 ? String.format("%s-%s", DEFAULT_NAME_V2, index) : DEFAULT_NAME_V2; FeatureEncodingFlag featureEncodingFlag = FeatureEncodingFlag.auto; if (json.has("feature_encoding")) { try { featureEncodingFlag = FeatureEncodingFlag.valueOf(json.path("feature_encoding").textValue()); } catch (IllegalArgumentException e) { // Use default value of auto } } if (json.has("targets")) { JsonNode labels = json.path("targets"); Collection<JsonNode> labelNodes = new ArrayList<>(); if (labels.isArray()) { labels.forEach(labelNodes::add); } else { labelNodes.add(labels); } ParseLabelsV2 parseLabels = new ParseLabelsV2(labelNodes, defaultSplitRates, dataModel); parseLabels.validate(); nodeClassLabels.addAll(parseLabels.parseNodeClassLabels()); edgeClassLabels.addAll(parseLabels.parseEdgeClassLabels()); } if (json.has("features")) { JsonNode features = json.path("features"); Collection<JsonNode> featureNodes = new ArrayList<>(); if (features.isArray()) { features.forEach(featureNodes::add); } else { featureNodes.add(features); } ParseFeaturesV2 parseFeatures = new ParseFeaturesV2(featureNodes); parseFeatures.validate(); noneNodeFeatures.addAll(parseFeatures.parseNoneFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); tfIdfNodeFeatures.addAll(parseFeatures.parseTfIdfFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); datetimeNodeFeatures.addAll(parseFeatures.parseDatetimeFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); word2VecNodeFeatures.addAll(parseFeatures.parseWord2VecFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); fastTextNodeFeatures.addAll(parseFeatures.parseFastTextFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); sbertNodeFeatures.addAll(parseFeatures.parseSbertFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); numericalBucketNodeFeatures.addAll(parseFeatures.parseNumericalBucketFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier)); noneEdgeFeatures.addAll(parseFeatures.parseNoneFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); tfIdfEdgeFeatures.addAll(parseFeatures.parseTfIdfFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); datetimeEdgeFeatures.addAll(parseFeatures.parseDatetimeFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); word2VecEdgeFeatures.addAll(parseFeatures.parseWord2VecFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); fastTextEdgeFeatures.addAll(parseFeatures.parseFastTextFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); sbertEdgeFeatures.addAll(parseFeatures.parseSbertFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); 
numericalBucketEdgeFeatures.addAll(parseFeatures.parseNumericalBucketFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier)); nodeFeatureOverrides.addAll(parseFeatures.parseNodeFeatureOverrides()); edgeFeatureOverrides.addAll(parseFeatures.parseEdgeFeatureOverrides()); } ElementConfig nodeConfig = new ElementConfig( nodeClassLabels, noneNodeFeatures, tfIdfNodeFeatures, datetimeNodeFeatures, word2VecNodeFeatures, fastTextNodeFeatures, sbertNodeFeatures, numericalBucketNodeFeatures, nodeFeatureOverrides); ElementConfig edgeConfig = new ElementConfig( edgeClassLabels, noneEdgeFeatures, tfIdfEdgeFeatures, datetimeEdgeFeatures, word2VecEdgeFeatures, fastTextEdgeFeatures, sbertEdgeFeatures, numericalBucketEdgeFeatures, edgeFeatureOverrides); return new TrainingDataWriterConfigV2(name, featureEncodingFlag, defaultSplitRates, nodeConfig, edgeConfig); } private final String name; private final FeatureEncodingFlag featureEncodingFlag; private final Collection<Double> defaultSplitRates; private final ElementConfig nodeConfig; private final ElementConfig edgeConfig; public TrainingDataWriterConfigV2() { this(DEFAULT_NAME_V2, FeatureEncodingFlag.auto, DEFAULT_SPLIT_RATES_V2, ElementConfig.EMPTY_CONFIG, ElementConfig.EMPTY_CONFIG); } public TrainingDataWriterConfigV2(String name, FeatureEncodingFlag featureEncodingFlag, Collection<Double> defaultSplitRates, ElementConfig nodeConfig, ElementConfig edgeConfig) { this.name = name; this.featureEncodingFlag = featureEncodingFlag; this.defaultSplitRates = defaultSplitRates; this.nodeConfig = nodeConfig; this.edgeConfig = edgeConfig; } public String name() { return name; } public boolean allowFeatureEncoding() { return featureEncodingFlag == FeatureEncodingFlag.auto; } public Collection<Double> defaultSplitRates() { return defaultSplitRates; } public ElementConfig nodeConfig() { return nodeConfig; } public ElementConfig edgeConfig() { return edgeConfig; } @Override public String toString() { return "TrainingDataWriterConfigV2{" + "name='" + name + '\'' + ", defaultSplitRates=" + defaultSplitRates + ", nodeConfig=" + nodeConfig + ", edgeConfig=" + edgeConfig + '}'; } }
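A hedged sketch of driving fromJson above with a minimal configuration. Only the "name" key is exercised; it assumes ParseSplitRate falls back to the default split rates when no rate is supplied, and NeptuneMLSourceDataModel.RDF is taken from its use elsewhere in this package.

import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collection;

public class TrainingDataWriterConfigV2Sketch {
    public static void main(String[] args) throws Exception {
        // Minimal configuration: no targets or features, so elements fall back to
        // auto-inferred handling and (assumed) default 0.9/0.1/0.0 split rates.
        JsonNode json = new ObjectMapper().readTree("{\"name\": \"my-training-job\"}");
        Collection<TrainingDataWriterConfigV2> configs =
                TrainingDataWriterConfigV2.fromJson(json, NeptuneMLSourceDataModel.RDF);
        TrainingDataWriterConfigV2 config = configs.iterator().next();
        System.out.println(config.name());                 // my-training-job
        System.out.println(config.defaultSplitRates());    // [0.9, 0.1, 0.0]
        System.out.println(config.allowFeatureEncoding()); // true (feature_encoding defaults to auto)
    }
}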
1,097
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/NumericalBucketFeatureConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range; import com.amazonaws.services.neptune.propertygraph.Label; public class NumericalBucketFeatureConfigV2 { private final Label label; private final String property; private final Range range; private final int bucketCount; private final int slideWindowSize; private final ImputerTypeV2 imputerType; public NumericalBucketFeatureConfigV2(Label label, String property, Range range, int bucketCount, int slideWindowSize, ImputerTypeV2 imputerType) { this.label = label; this.property = property; this.range = range; this.bucketCount = bucketCount; this.slideWindowSize = slideWindowSize; this.imputerType = imputerType; } public Label label() { return label; } public String property() { return property; } public Integer bucketCount() { return bucketCount; } public Integer slideWindowSize() { return slideWindowSize; } public Range range() { return range; } public ImputerTypeV2 imputerType() { return imputerType; } @Override public String toString() { return "NumericalBucketFeatureConfig{" + "label=" + label + ", property='" + property + '\'' + ", range=" + range + ", bucketCount=" + bucketCount + ", slideWindowSize=" + slideWindowSize + ", imputerType=" + imputerType + '}'; } }
1,098
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/FeatureTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.fasterxml.jackson.databind.JsonNode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; public enum FeatureTypeV2 { bucket_numerical { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("separator")) { throw new IllegalArgumentException(String.format("Invalid 'separator' field for %s. Bucket numerical feature property cannot contain multiple values.", context)); } } }, text_word2vec { @Override public Collection<String> validNames(){ return Arrays.asList(name(), "word2vec"); } @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, text_fasttext { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, text_sbert { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, text_sbert128 { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, text_sbert512 { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, category { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, numerical, text_tfidf { @Override public void validateOverride(JsonNode json, ParsingContext context) { if (json.has("imputer")) { throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context)); } } }, datetime, auto, none; public void validateOverride(JsonNode node, ParsingContext context) { //Do nothing } public Collection<String> validNames(){ return Collections.singletonList(name()); } public static FeatureTypeV2 fromString(String s) { for (FeatureTypeV2 featureType : FeatureTypeV2.values()) { for (String validName : featureType.validNames()) { if (validName.equals(s)){ return featureType; } } } throw new IllegalArgumentException(String.format("Invalid feature type: %s (valid types are: %s)", s, String.join(", ", publicFormattedNames()))); } public static Collection<String> publicFormattedNames() { Collection<String> results = new ArrayList<>(); for (FeatureTypeV2 featureType : 
FeatureTypeV2.values()) { results.add(featureType.name()); } return results; } }
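A small sketch of the alias handling and override validation defined above; the single-String ParsingContext constructor is taken from its use in TrainingDataWriterConfigV2 earlier, and the JSON fragment is illustrative.

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.FeatureTypeV2;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class FeatureTypeV2Sketch {
    public static void main(String[] args) throws Exception {
        // "word2vec" is accepted as an alias for text_word2vec via validNames().
        System.out.println(FeatureTypeV2.fromString("word2vec")); // text_word2vec

        // text_word2vec feature overrides reject an 'imputer' field.
        JsonNode override = new ObjectMapper().readTree("{\"imputer\": \"median\"}");
        try {
            FeatureTypeV2.text_word2vec.validateOverride(override, new ParsingContext("feature override"));
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Invalid 'imputer' field for <context>.
        }
    }
}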
1,099