index
int64 0
0
| repo_id
stringlengths 9
205
| file_path
stringlengths 31
246
| content
stringlengths 1
12.2M
| __index_level_0__
int64 0
10k
|
---|---|---|---|---|
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/EdgeLabelStrategyTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy.edgeLabelsOnly;
import static com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy.edgeAndVertexLabels;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@link EdgeLabelStrategy}: label discovery against the TinkerPop
 * "modern" sample graph, and label extraction from a raw property map and from
 * a {@link PGEdgeResult}.
 */
public class EdgeLabelStrategyTest {

    private final GraphTraversalSource modernGraph;
    private final Map<String, Object> edgeProperties;
    private final PGEdgeResult edgeResult;
    private final List<String> sourceVertexLabels;
    private final List<String> targetVertexLabels;

    public EdgeLabelStrategyTest() {
        modernGraph = TinkerFactory.createModern().traversal();

        sourceVertexLabels = new ArrayList<>();
        sourceVertexLabels.add("FromLabel");

        targetVertexLabels = new ArrayList<>();
        targetVertexLabels.add("ToLabels");

        // Shape mirrors the token-keyed map the exporter produces for an edge.
        edgeProperties = new HashMap<>();
        edgeProperties.put("~label", "TestLabel");
        edgeProperties.put("~from", "FromID");
        edgeProperties.put("~to", "ToID");
        edgeProperties.put("~fromLabels", sourceVertexLabels);
        edgeProperties.put("~toLabels", targetVertexLabels);

        edgeResult = new PGEdgeResult(edgeProperties);
    }

    // --- edgeLabelsOnly strategy ---

    @Test
    public void shouldGetEdgeLabelsFromModernGraph() {
        Collection<Label> expected = new HashSet<>();
        expected.add(new Label("knows"));
        expected.add(new Label("created"));

        Collection<Label> actual = edgeLabelsOnly.getLabels(modernGraph);

        assertEquals(expected, actual);
    }

    @Test
    public void shouldGetEdgeLabelForMap() {
        assertEquals(new Label("TestLabel"), edgeLabelsOnly.getLabelFor(edgeProperties));
    }

    @Test
    public void shouldGetEdgeLabelForPgEdgeResult() {
        assertEquals(new Label("TestLabel"), edgeLabelsOnly.getLabelFor(edgeResult));
    }

    // --- edgeAndVertexLabels strategy ---

    @Test
    public void shouldGetEdgeAndVertexLabelsFromModernGraph() {
        Collection<Label> expected = new HashSet<>();
        expected.add(new Label("(person)-knows-(person)"));
        expected.add(new Label("(person)-created-(software)"));

        Collection<Label> actual = edgeAndVertexLabels.getLabels(modernGraph);

        assertEquals(expected, actual);
    }

    @Test
    public void shouldGetEdgeAndVertexLabelForMap() {
        assertEquals(new Label("TestLabel", sourceVertexLabels, targetVertexLabels),
                edgeAndVertexLabels.getLabelFor(edgeProperties));
    }

    @Test
    public void shouldGetEdgeAndVertexLabelForPgEdgeResult() {
        assertEquals(new Label("TestLabel", sourceVertexLabels, targetVertexLabels),
                edgeAndVertexLabels.getLabelFor(edgeResult));
    }
}
| 4,100 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/ExportStatsTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Verifies the human-readable summary produced by {@link ExportStats#formatStats}
 * after one node/edge of each label has been counted against a fixed schema.
 */
public class ExportStatsTest {
    @Test
    public void testExportStats() throws JsonProcessingException {
        ExportStats stats = new ExportStats();
        ObjectNode neptuneExportNode = JsonNodeFactory.instance.objectNode();
        // Schema with two node labels and two edge labels; node1/edge1 each carry
        // two properties, node2/edge2 carry none.
        GraphSchema schema = GraphSchema.fromJson(new ObjectMapper().readTree(
                "{\"nodes\":[" +
                        "{\"label\":\"node1\"," +
                        "\"properties\":[" +
                        "{\"property\":\"prop1\",\"dataType\":\"String\",\"isMultiValue\":false,\"isNullable\":false,\"allTypes\":[\"String\"]}," +
                        "{\"property\":\"prop2\",\"dataType\":\"Double\",\"isMultiValue\":true,\"isNullable\":true,\"allTypes\":[\"Double\",\"Float\"]}]}," +
                        "{\"label\":\"node2\"," +
                        "\"properties\":[]}" +
                        "]," +
                        "\"edges\":[" +
                        "{\"label\":\"edge1\"," +
                        "\"properties\":[" +
                        "{\"property\":\"prop1\",\"dataType\":\"String\",\"isMultiValue\":false,\"isNullable\":false,\"allTypes\":[\"String\"]}," +
                        "{\"property\":\"prop2\",\"dataType\":\"Double\",\"isMultiValue\":true,\"isNullable\":true,\"allTypes\":[\"Double\",\"Float\"]}]}," +
                        "{\"label\":\"edge2\"," +
                        "\"properties\":[]}" +
                        "]}"
        ));
        // One occurrence of each label; property counters are never incremented,
        // so the per-property lines below all show zero counts.
        stats.incrementNodeStats(new Label("node1"));
        stats.incrementNodeStats(new Label("node2"));
        stats.incrementEdgeStats(new Label("edge1"));
        stats.incrementEdgeStats(new Label("edge2"));
        stats.addTo(neptuneExportNode, schema);
        String formattedStats = stats.formatStats(schema);
        // NOTE(review): edge2 is listed before edge1 here while nodes appear in
        // insertion order — presumably this reflects the iteration order of the
        // internal stats map; confirm against ExportStats before relying on it.
        String expectedStats =
                "Source:\n" +
                        "  Nodes: 0\n" +
                        "  Edges: 0\n" +
                        "Export:\n" +
                        "  Nodes: 2\n" +
                        "  Edges: 2\n" +
                        "  Properties: 0\n" +
                        "Details:\n" +
                        "  Nodes: \n" +
                        "    node1: 1\n" +
                        "      |_ prop1 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n" +
                        "      |_ prop2 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n" +
                        "    node2: 1\n" +
                        "  Edges: \n" +
                        "    edge2: 1\n" +
                        "    edge1: 1\n" +
                        "      |_ prop1 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n" +
                        "      |_ prop2 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n";
        assertEquals(expectedStats, formattedStats);
    }
}
| 4,101 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/RangeFactoryTest.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.cluster.ConcurrencyConfig;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.util.NotImplementedException;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.junit.Test;
import java.util.Collection;
import java.util.Map;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link RangeFactory}: splitting an (approximate) element count into
 * consecutive {@code range(start, end)} chunks, honouring skip offsets, range
 * size, upper limits, and exhaustion reporting.
 */
public class RangeFactoryTest {

    /**
     * A LabelsFilter whose strategy methods all throw: these tests exercise only
     * range arithmetic, so any call into the label strategy indicates a test bug.
     */
    private static final LabelsFilter ALL_LABELS = new AllLabels(new LabelStrategy() {
        @Override
        public Collection<Label> getLabels(GraphTraversalSource g) {
            throw new NotImplementedException();
        }

        @Override
        public Label getLabelFor(Map<String, Object> input) {
            throw new NotImplementedException();
        }

        @Override
        public Label getLabelFor(PGResult input) {
            throw new NotImplementedException();
        }

        @Override
        public String[] additionalColumns(String... columns) {
            throw new NotImplementedException();
        }

        @Override
        public <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? extends Element, T> t) {
            throw new NotImplementedException();
        }
    });

    /**
     * Builds a RangeFactory backed by a mocked GraphClient whose approximate
     * element count is fixed at {@code approxCount}. Extracted to remove the
     * mock setup that was duplicated verbatim in every test.
     */
    private static RangeFactory createRangeFactory(long approxCount, RangeConfig rangeConfig) {
        GraphClient<?> graphClient = mock(GraphClient.class);
        when(graphClient.approxCount(any(), any(), any())).thenReturn(approxCount);
        return RangeFactory.create(
                graphClient,
                ALL_LABELS,
                GremlinFilters.EMPTY,
                rangeConfig,
                new ConcurrencyConfig(1));
    }

    @Test
    public void shouldReturnConsecutiveRanges() {
        RangeFactory rangeFactory = createRangeFactory(2250L, new RangeConfig(1000, 0, 2500, -1, -1));

        Range range1 = rangeFactory.nextRange();
        assertEquals("range(0, 1000)", range1.toString());

        Range range2 = rangeFactory.nextRange();
        assertEquals("range(1000, 2000)", range2.toString());

        // Final chunk is truncated to the 2500 limit but is still non-empty.
        Range range3 = rangeFactory.nextRange();
        assertEquals("range(2000, 2500)", range3.toString());
        assertFalse(range3.isEmpty());

        // Past the limit the factory hands out the empty sentinel range.
        Range range4 = rangeFactory.nextRange();
        assertEquals("range(-1, -1)", range4.toString());
        assertTrue(range4.isEmpty());
    }

    @Test
    public void shouldReturnSingleRangeForAllIfRangeSizeIsMinusOne() {
        RangeFactory rangeFactory = createRangeFactory(2250L, new RangeConfig(-1, 0, Long.MAX_VALUE, -1, -1));

        // Range size -1 means "everything in one range"; the open end is -1.
        Range range1 = rangeFactory.nextRange();
        assertEquals("range(0, -1)", range1.toString());
        assertFalse(range1.isEmpty());

        Range range2 = rangeFactory.nextRange();
        assertEquals("range(-1, -1)", range2.toString());
        assertTrue(range2.isEmpty());
    }

    @Test
    public void shouldLeaveLastRangeOpenIfNoUpperLimit() {
        RangeFactory rangeFactory = createRangeFactory(2250L, new RangeConfig(1000, 0, Long.MAX_VALUE, -1, -1));

        Range range1 = rangeFactory.nextRange();
        assertEquals("range(0, 1000)", range1.toString());

        Range range2 = rangeFactory.nextRange();
        assertEquals("range(1000, 2000)", range2.toString());

        // With no upper limit, the last range (covering the remaining ~250
        // elements of the approximate count) is left open-ended.
        Range range3 = rangeFactory.nextRange();
        assertEquals("range(2000, -1)", range3.toString());
        assertFalse(range3.isEmpty());

        Range range4 = rangeFactory.nextRange();
        assertEquals("range(-1, -1)", range4.toString());
        assertTrue(range4.isEmpty());
    }

    @Test
    public void shouldIndicateThatItIsExhausted() {
        RangeFactory rangeFactory = createRangeFactory(5000L, new RangeConfig(1000, 0, 2000, -1, -1));

        rangeFactory.nextRange();
        assertFalse(rangeFactory.isExhausted());

        // Second 1000-element range reaches the 2000 limit.
        rangeFactory.nextRange();
        assertTrue(rangeFactory.isExhausted());
    }

    @Test
    public void shouldCalculateRangesStartingFromSkipNumber() {
        RangeFactory rangeFactory = createRangeFactory(30L, new RangeConfig(10, 20, 10, -1, -1));

        // Skip of 20 means the first range begins at 20, not 0.
        Range range1 = rangeFactory.nextRange();
        assertEquals("range(20, 30)", range1.toString());
        assertTrue(rangeFactory.isExhausted());
    }
}
| 4,102 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/EdgesClientTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.GraphElementHandler;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link EdgesClient} against the TinkerPop "modern" sample graph:
 * schema discovery over edge properties and value retrieval by edge id.
 */
public class EdgesClientTest {

    private EdgesClient client;
    private GraphTraversalSource graphTraversalSource;
    private ExportStats mockStats;
    private FeatureToggles mockFeatures;

    @Before
    public void setup() {
        mockStats = mock(ExportStats.class);
        mockFeatures = mock(FeatureToggles.class);
        when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);

        graphTraversalSource = TinkerFactory.createModern().traversal();
        client = new EdgesClient(graphTraversalSource, false, mockStats, mockFeatures);
    }

    @Test
    public void testQueryForSchema() throws JsonProcessingException {
        GraphSchema discoveredSchema = new GraphSchema();

        // Handler folds each edge's property map into the schema under test.
        GraphElementHandler<Map<?, Object>> schemaHandler = new GraphElementHandler<Map<?, Object>>() {
            @Override
            public void handle(Map<?, Object> properties, boolean allowTokens) throws IOException {
                discoveredSchema.update(GraphElementType.edges, properties, allowTokens);
            }

            @Override
            public void close() {}
        };

        client.queryForSchema(schemaHandler, Range.ALL,
                new AllLabels(EdgeLabelStrategy.edgeLabelsOnly), GremlinFilters.EMPTY);

        JsonNode expectedSchema = new ObjectMapper().readTree(
                "{\n" +
                        "  \"edges\" : [ {\n" +
                        "    \"label\" : \"knows\",\n" +
                        "    \"properties\" : [ {\n" +
                        "      \"property\" : \"weight\",\n" +
                        "      \"dataType\" : \"Double\",\n" +
                        "      \"isMultiValue\" : false,\n" +
                        "      \"isNullable\" : false,\n" +
                        "      \"allTypes\" : [ \"Double\" ]\n" +
                        "    } ]\n" +
                        "  }, {\n" +
                        "    \"label\" : \"created\",\n" +
                        "    \"properties\" : [ {\n" +
                        "      \"property\" : \"weight\",\n" +
                        "      \"dataType\" : \"Double\",\n" +
                        "      \"isMultiValue\" : false,\n" +
                        "      \"isNullable\" : false,\n" +
                        "      \"allTypes\" : [ \"Double\" ]\n" +
                        "    } ]\n" +
                        "  } ]\n" +
                        "}"
        );

        assertEquals(expectedSchema, discoveredSchema.toJson(false));
    }

    @Test
    public void testQueryForValues() {
        List<String> collectedIds = new ArrayList<>();

        // Handler records each edge id and checks tokens are disallowed.
        GraphElementHandler<PGResult> idCollector = new GraphElementHandler<PGResult>() {
            @Override
            public void handle(PGResult element, boolean allowTokens) throws IOException {
                collectedIds.add(element.getId());
                assertFalse(allowTokens);
            }

            @Override
            public void close() throws Exception {}
        };

        client.queryForValues(idCollector, Range.ALL,
                new AllLabels(EdgeLabelStrategy.edgeLabelsOnly),
                GremlinFilters.EMPTY, new GraphElementSchemas());

        assertEquals(Arrays.asList("7", "8", "9", "10", "11", "12"), collectedIds);
    }
}
| 4,103 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/NeptuneGremlinClientTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.auth.HandshakeRequestConfig;
import com.amazonaws.services.neptune.cluster.ConcurrencyConfig;
import com.amazonaws.services.neptune.cluster.ConnectionConfig;
import com.amazonaws.services.neptune.propertygraph.io.SerializationConfig;
import org.apache.tinkerpop.gremlin.driver.Cluster;
import org.apache.tinkerpop.gremlin.driver.HandshakeInterceptor;
import org.apache.tinkerpop.gremlin.driver.LBAwareSigV4WebSocketChannelizer;
import org.apache.tinkerpop.gremlin.driver.ser.Serializers;
import org.junit.Test;
import org.apache.tinkerpop.gremlin.driver.Client;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link NeptuneGremlinClient}: query submission passthrough,
 * connection-config propagation into the underlying TinkerPop Cluster, and
 * selection of the IAM-signing mechanism (handshake interceptor for direct
 * connections vs. LB-aware channelizer for proxied ones).
 *
 * Uses reflection to reach private Cluster internals, so these tests are
 * coupled to the TinkerPop driver's field/method names.
 */
public class NeptuneGremlinClientTest {
    private final SerializationConfig defaultSerializationConfig = new SerializationConfig(
            Serializers.GRAPHBINARY_V1D0.name(), 50000000, NeptuneGremlinClient.DEFAULT_BATCH_SIZE, false);

    @Test
    public void testQueryClientSubmit() {
        Client mockedClient = mock(Client.class);
        NeptuneGremlinClient.QueryClient qc = new NeptuneGremlinClient.QueryClient(mockedClient);
        // The second argument is presumably optional submit metadata/options —
        // the wrapper is expected to forward only the query string.
        qc.submit("test", null);
        verify(mockedClient).submit("test");
    }

    @Test
    public void testConnectionConfigPassthrough() {
        com.amazonaws.services.neptune.cluster.Cluster mockedCluster = mock(com.amazonaws.services.neptune.cluster.Cluster.class);
        // NOTE(review): raw Collection/HashSet — Collection<String> would be cleaner.
        Collection endpoints = new HashSet();
        endpoints.add("localhost");
        //With SSL Enabled
        when(mockedCluster.connectionConfig()).thenReturn(new ConnectionConfig(
                null, endpoints, 1234, false, true, null));
        when(mockedCluster.concurrencyConfig()).thenReturn(new ConcurrencyConfig(1));
        NeptuneGremlinClient client = NeptuneGremlinClient.create(mockedCluster, defaultSerializationConfig);
        Cluster cluster = getClusterFromClient(client);
        // init() is required before allHosts() is populated.
        cluster.init();
        assertEquals(1234, cluster.getPort());
        assertEquals("wss://localhost:1234/gremlin", cluster.allHosts().iterator().next().getHostUri().toString());
        assertEquals(true, cluster.isSslEnabled());
        //With SSL Disabled — the stub is replaced and a fresh client created.
        when(mockedCluster.connectionConfig()).thenReturn(new ConnectionConfig(
                null, endpoints, 1234, false, false, null));
        client = NeptuneGremlinClient.create(mockedCluster, defaultSerializationConfig);
        cluster = getClusterFromClient(client);
        cluster.init();
        assertEquals("ws://localhost:1234/gremlin", cluster.allHosts().iterator().next().getHostUri().toString());
        assertEquals(false, cluster.isSslEnabled());
    }

    @Test
    public void shouldUseHandshakeInterceptorForSigningDirectConnections() {
        ConnectionConfig mockedConfig = mock(ConnectionConfig.class);
        when(mockedConfig.isDirectConnection()).thenReturn(true);
        Cluster.Builder builder = Cluster.build();
        builder = NeptuneGremlinClient.configureIamSigning(builder, mockedConfig);
        Cluster cluster = builder.create();
        HandshakeInterceptor interceptor;
        try {
            // getHandshakeInterceptor() is private on the driver's Cluster class.
            Method getHandshakeInterceptor = cluster.getClass().getDeclaredMethod("getHandshakeInterceptor");
            getHandshakeInterceptor.setAccessible(true);
            interceptor = (HandshakeInterceptor) getHandshakeInterceptor.invoke(cluster);
            getHandshakeInterceptor.setAccessible(false);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // A real (non-no-op) interceptor means SigV4 signing was installed.
        assertNotNull(interceptor);
        assertNotEquals(interceptor, HandshakeInterceptor.NO_OP);
    }

    @Test
    public void shouldUseLBAwareChannelizerForSigningProxyConnections() {
        ConnectionConfig mockedConfig = mock(ConnectionConfig.class);
        when(mockedConfig.isDirectConnection()).thenReturn(false);
        when(mockedConfig.handshakeRequestConfig()).thenReturn(mock(HandshakeRequestConfig.class));
        Cluster.Builder builder = Cluster.build();
        builder = NeptuneGremlinClient.configureIamSigning(builder, mockedConfig);
        assertEquals(LBAwareSigV4WebSocketChannelizer.class.getName(), builder.create().getChannelizer());
    }

    /**
     * Extracts the private "cluster" field from a NeptuneGremlinClient via
     * reflection; wraps any reflective failure in a RuntimeException.
     */
    private static Cluster getClusterFromClient(NeptuneGremlinClient client) {
        try {
            Field clusterField = client.getClass().getDeclaredField("cluster");
            clusterField.setAccessible(true);
            Cluster cluster = (Cluster) clusterField.get(client);
            clusterField.setAccessible(false);
            return cluster;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
| 4,104 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/LabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import java.util.Arrays;
import static org.junit.Assert.*;
/**
 * Tests for {@link Label}: parsing simple/complex node and edge labels from
 * JSON (labels are normalised to sorted, semicolon-separated form), and the
 * {@code isAssignableFrom} subset semantics between labels.
 */
public class LabelTest {

    // --- JSON parsing ---

    @Test
    public void shouldParseSimpleNodeLabelFromJson() throws JsonProcessingException {
        String json = "{\n" +
                "  \"label\" : \"label1\"\n" +
                "}";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("label1", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseComplexNodeLabelFromSingleSemiColonSeparatedStringValue() throws JsonProcessingException {
        String json = "{\n" +
                "  \"label\" : \"labelB;labelA\"\n" +
                "}";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        // Multi-labels are sorted alphabetically in the fully qualified form.
        assertEquals("labelA;labelB", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseComplexNodeLabelFromArray() throws JsonProcessingException {
        String json = "{\n" +
                "  \"label\" : [ \"labelB\", \"labelA\" ]\n" +
                "}";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("labelA;labelB", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelFromJson() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\",\n" +
                "      \"~fromLabels\" : [ \"startLabel\" ],\n" +
                "      \"~toLabels\" : [ \"endLabel\" ]\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("(startLabel)-edgeLabel-(endLabel)", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelWithMultiLabelStartAndEndVerticesFromJson() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\",\n" +
                "      \"~fromLabels\" : [ \"startLabel2\", \"startLabel1\" ],\n" +
                "      \"~toLabels\" : [ \"endLabel2\", \"endLabel1\", \"endLabel3\" ]\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("(startLabel1;startLabel2)-edgeLabel-(endLabel1;endLabel2;endLabel3)", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelFromJsonWithSimpleStringStartAndEndVertexLabels() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\",\n" +
                "      \"~fromLabels\" : \"startLabel\",\n" +
                "      \"~toLabels\" : \"endLabel\"\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("(startLabel)-edgeLabel-(endLabel)", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelFromJsonWithSemicolonSeparatedStringStartAndEndVertexLabels() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\",\n" +
                "      \"~fromLabels\" : \"startLabel2;startLabel1\",\n" +
                "      \"~toLabels\" : \"endLabel2;endLabel1;endLabel3\"\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("(startLabel1;startLabel2)-edgeLabel-(endLabel1;endLabel2;endLabel3)", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelFromJsonWithMissingStartVertexLabel() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\",\n" +
                "      \"~toLabels\" : \"endLabel\"\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        // A missing side is rendered as the "_" placeholder.
        assertEquals("(_)-edgeLabel-(endLabel)", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelFromJsonWithMissingEndVertexLabel() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\",\n" +
                "      \"~fromLabels\" : [ \"startLabel\" ]\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        assertEquals("(startLabel)-edgeLabel-(_)", label.fullyQualifiedLabel());
    }

    @Test
    public void shouldParseEdgeLabelFromJsonWithMissingStartAndEndVertexLabels() throws JsonProcessingException {
        String json = "{\n" +
                "    \"label\" : {\n" +
                "      \"~label\" : \"edgeLabel\"\n" +
                "    }\n" +
                "  }";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Label label = Label.fromJson(jsonNode.path("label"));
        // With neither side present, only the bare edge label is used.
        assertEquals("edgeLabel", label.fullyQualifiedLabel());
    }

    // --- isAssignableFrom semantics ---

    @Test
    public void twoSimpleLabelsCanBeAssignedFromEachOther() {
        Label l1 = new Label("my-label");
        Label l2 = new Label("my-label");
        assertTrue(l1.isAssignableFrom(l2));
        assertTrue(l2.isAssignableFrom(l1));
    }

    @Test
    public void twoEquivalentComplexLabelsCanBeAssignedFromEachOther() {
        Label l1 = new Label("my-label", "startLabel1;startLabel2", "endLabel1;endLabel2");
        Label l2 = new Label("my-label", "startLabel1;startLabel2", "endLabel1;endLabel2");
        // Ordering within the semicolon-separated lists is irrelevant.
        Label l3 = new Label("my-label", "startLabel2;startLabel1", "endLabel2;endLabel1");
        assertTrue(l1.isAssignableFrom(l2));
        assertTrue(l1.isAssignableFrom(l3));
        assertTrue(l2.isAssignableFrom(l1));
        assertTrue(l2.isAssignableFrom(l3));
        assertTrue(l3.isAssignableFrom(l1));
        assertTrue(l3.isAssignableFrom(l2));
    }

    @Test
    public void simpleLabelCanBeAssignedFromComplexLabelButComplexLabelCannotBeAssignedFromSimpleLabel() {
        Label l1 = new Label("my-label");
        Label l2 = new Label("my-label", "startLabel", "endLabel");
        assertTrue(l1.isAssignableFrom(l2));
        assertFalse(l2.isAssignableFrom(l1));
    }

    @Test
    public void complexLabelComprisingSubsetOfAnotherComplexLabelCanBeAssignedFromLatter() {
        Label l1 = new Label("my-label", "startLabel1", "endLabel1");
        Label l2 = new Label("my-label", "startLabel1", "");
        Label l3 = new Label("my-label", Arrays.asList("startLabel2", "startLabel1"), Arrays.asList("endLabel2", "endLabel1"));
        assertTrue(l1.isAssignableFrom(l3));
        assertTrue(l2.isAssignableFrom(l3));
        assertTrue(l2.isAssignableFrom(l1));
        assertFalse(l3.isAssignableFrom(l1));
        assertFalse(l3.isAssignableFrom(l2));
        assertFalse(l1.isAssignableFrom(l2));
    }

    @Test
    public void complexLabelsThatOnlyOverlapCannotBeAssignedFromEachOther() {
        // Fixed: vertex-label lists use ';' as the separator (as in the tests
        // above). The previous ", " separator produced two single-element label
        // sets that shared nothing, so the test never exercised the
        // overlapping-set case its name describes.
        Label l1 = new Label("my-label", "startLabel1;startLabel2", "endLabel1;endLabel2");
        Label l2 = new Label("my-label", "startLabel2;startLabel3", "endLabel2;endLabel3");
        assertFalse(l1.isAssignableFrom(l2));
        assertFalse(l2.isAssignableFrom(l1));
    }
}
| 4,105 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/NodesClientTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.GraphElementHandler;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link NodesClient} against the TinkerPop "modern" sample graph:
 * schema discovery over node properties and value retrieval by node id.
 */
public class NodesClientTest {

    private NodesClient client;
    private GraphTraversalSource graphTraversalSource;
    private ExportStats mockStats;
    private FeatureToggles mockFeatures;

    @Before
    public void setup() {
        mockStats = mock(ExportStats.class);
        mockFeatures = mock(FeatureToggles.class);
        when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);

        graphTraversalSource = TinkerFactory.createModern().traversal();
        client = new NodesClient(graphTraversalSource, false, mockStats, mockFeatures);
    }

    @Test
    public void testQueryForSchema() throws JsonProcessingException {
        GraphSchema discoveredSchema = new GraphSchema();

        // Handler folds each node's property map into the schema under test.
        GraphElementHandler<Map<?, Object>> schemaHandler = new GraphElementHandler<Map<?, Object>>() {
            @Override
            public void handle(Map<?, Object> properties, boolean allowTokens) throws IOException {
                discoveredSchema.update(GraphElementType.nodes, properties, allowTokens);
            }

            @Override
            public void close() {}
        };

        client.queryForSchema(schemaHandler, Range.ALL,
                new AllLabels(NodeLabelStrategy.nodeLabelsOnly), GremlinFilters.EMPTY);

        JsonNode expectedSchema = new ObjectMapper().readTree(
                "{\n" +
                        "  \"nodes\" : [ {\n" +
                        "    \"label\" : \"software\",\n" +
                        "    \"properties\" : [ {\n" +
                        "      \"property\" : \"name\",\n" +
                        "      \"dataType\" : \"String\",\n" +
                        "      \"isMultiValue\" : false,\n" +
                        "      \"isNullable\" : false,\n" +
                        "      \"allTypes\" : [ \"String\" ]\n" +
                        "    }, {\n" +
                        "      \"property\" : \"lang\",\n" +
                        "      \"dataType\" : \"String\",\n" +
                        "      \"isMultiValue\" : false,\n" +
                        "      \"isNullable\" : false,\n" +
                        "      \"allTypes\" : [ \"String\" ]\n" +
                        "    } ]\n" +
                        "  }, {\n" +
                        "    \"label\" : \"person\",\n" +
                        "    \"properties\" : [ {\n" +
                        "      \"property\" : \"name\",\n" +
                        "      \"dataType\" : \"String\",\n" +
                        "      \"isMultiValue\" : false,\n" +
                        "      \"isNullable\" : false,\n" +
                        "      \"allTypes\" : [ \"String\" ]\n" +
                        "    }, {\n" +
                        "      \"property\" : \"age\",\n" +
                        "      \"dataType\" : \"Integer\",\n" +
                        "      \"isMultiValue\" : false,\n" +
                        "      \"isNullable\" : false,\n" +
                        "      \"allTypes\" : [ \"Integer\" ]\n" +
                        "    } ]\n" +
                        "  } ]\n" +
                        "}"
        );

        assertEquals(expectedSchema, discoveredSchema.toJson(false));
    }

    @Test
    public void testQueryForValues() {
        List<String> collectedIds = new ArrayList<>();

        // Handler records each node id and checks tokens are disallowed.
        GraphElementHandler<PGResult> idCollector = new GraphElementHandler<PGResult>() {
            @Override
            public void handle(PGResult element, boolean allowTokens) throws IOException {
                collectedIds.add(element.getId());
                assertFalse(allowTokens);
            }

            @Override
            public void close() throws Exception {}
        };

        client.queryForValues(idCollector, Range.ALL,
                new AllLabels(NodeLabelStrategy.nodeLabelsOnly),
                GremlinFilters.EMPTY, new GraphElementSchemas());

        assertEquals(Arrays.asList("1", "2", "3", "4", "5", "6"), collectedIds);
    }
}
| 4,106 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/NodeLabelStrategyTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.propertygraph.io.result.ExportPGNodeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy.edgeAndVertexLabels;
import static com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy.edgeLabelsOnly;
import static com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy.nodeLabelsOnly;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@link NodeLabelStrategy#nodeLabelsOnly}: label discovery against
 * the TinkerPop "modern" sample graph, and label extraction from a raw
 * property map and from an {@link ExportPGNodeResult}.
 */
public class NodeLabelStrategyTest {

    private final GraphTraversalSource modernGraph;
    private final Map<String, Object> nodeProperties;
    private final PGResult nodeResult;
    private final List<String> testLabels;

    public NodeLabelStrategyTest() {
        modernGraph = TinkerFactory.createModern().traversal();

        testLabels = new ArrayList<>();
        testLabels.add("TestLabel");

        // Shape mirrors the token-keyed map the exporter produces for a node.
        nodeProperties = new HashMap<>();
        nodeProperties.put("~label", testLabels);

        nodeResult = new ExportPGNodeResult(nodeProperties);
    }

    // --- nodeLabelsOnly strategy ---

    @Test
    public void shouldGetLabelsFromModernGraph() {
        Collection<Label> expected = new HashSet<>();
        expected.add(new Label("person"));
        expected.add(new Label("software"));

        // Renamed local (was "labels") to avoid shadowing the field.
        Collection<Label> actual = nodeLabelsOnly.getLabels(modernGraph);

        assertEquals(expected, actual);
    }

    @Test
    public void shouldGetLabelForMap() {
        assertEquals(new Label(testLabels), nodeLabelsOnly.getLabelFor(nodeProperties));
    }

    @Test
    public void shouldGetLabelForPgEdgeResult() {
        // NOTE(review): despite the method name, this exercises a node result.
        assertEquals(new Label(testLabels), nodeLabelsOnly.getLabelFor(nodeResult));
    }
}
| 4,107 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/SpecifiedLabelsTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.result.ExportPGNodeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
 * Tests for {@link SpecifiedLabels}: a labels filter restricted to an explicit
 * set of node or edge labels. Filter construction is verified by rendering the
 * resulting traversal as a string via GremlinQueryDebugger. A semicolon inside
 * a label string (e.g. "label1;label2") denotes a multi-label vertex and is
 * expanded into chained hasLabel() steps.
 */
public class SpecifiedLabelsTest {
    // One simple node label -> a single hasLabel() step.
    @Test
    public void shouldCreateLabelFilterForSimpleSingleNodeLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("label1")),
                NodeLabelStrategy.nodeLabelsOnly);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
        assertEquals("__.V().hasLabel(\"label1\")",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // Semicolon-delimited multi-label -> chained hasLabel() steps (logical AND).
    @Test
    public void shouldCreateLabelFilterForComplexSingleNodeLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("label1;label2")),
                NodeLabelStrategy.nodeLabelsOnly);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
        assertEquals("__.V().hasLabel(\"label1\").hasLabel(\"label2\")",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // Multiple distinct labels are combined with or().
    @Test
    public void shouldCreateLabelFilterWithOrForMultipleSimpleNodeLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Arrays.asList(new Label("label1"), new Label("label2")),
                NodeLabelStrategy.nodeLabelsOnly);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
        assertEquals("__.V().or(__.hasLabel(\"label1\"),__.hasLabel(\"label2\"))",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // or() of multi-label alternatives, each expanded to chained hasLabel() steps.
    @Test
    public void shouldCreateLabelFilterWithOrForMultipleComplexNodeLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Arrays.asList(new Label("label1;labelA"), new Label("label2;labelB")),
                NodeLabelStrategy.nodeLabelsOnly);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
        assertEquals("__.V().or(__.hasLabel(\"label1\").hasLabel(\"labelA\"),__.hasLabel(\"label2\").hasLabel(\"labelB\"))",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // With edgeLabelsOnly, start/end vertex labels on the Label are ignored.
    @Test
    public void shouldCreateLabelFilterForSimpleEdgeLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("edgeLabel1", "startLabel", "endLabel")),
                EdgeLabelStrategy.edgeLabelsOnly);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
        assertEquals("__.E().hasLabel(\"edgeLabel1\")",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // With edgeAndVertexLabels, the filter also constrains outV()/inV() labels.
    @Test
    public void shouldCreateLabelFilterForComplexEdgeLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("edgeLabel1", "startLabel", "endLabel")),
                EdgeLabelStrategy.edgeAndVertexLabels);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
        assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.and(__.outV().hasLabel(\"startLabel\"),__.inV().hasLabel(\"endLabel\")))",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // Multi-label start/end vertex labels expand to chained hasLabel() steps too.
    @Test
    public void shouldCreateLabelFilterForComplexEdgeLabelWithComplexVertexLabels() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("edgeLabel1", "startLabel1;startLabel2", "endLabel1;endLabel2")),
                EdgeLabelStrategy.edgeAndVertexLabels);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
        assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.and(__.outV().hasLabel(\"startLabel1\").hasLabel(\"startLabel2\"),__.inV().hasLabel(\"endLabel1\").hasLabel(\"endLabel2\")))",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // An empty end-vertex label yields a where() on outV() only — no and() wrapper.
    @Test
    public void shouldCreateLabelFilterForComplexEdgeLabelWithOnlyStartVertexLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("edgeLabel1", "startLabel", "")),
                EdgeLabelStrategy.edgeAndVertexLabels);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
        assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.outV().hasLabel(\"startLabel\"))",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // An empty start-vertex label yields a where() on inV() only.
    @Test
    public void shouldCreateLabelFilterForComplexEdgeLabelWithOnlyEndVertexLabel() {
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Collections.singletonList(new Label("edgeLabel1", "", "endLabel")),
                EdgeLabelStrategy.edgeAndVertexLabels);
        AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
        GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
        GraphTraversal<? extends Element, ?> traversal =
                specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
        assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.inV().hasLabel(\"endLabel\"))",
                GremlinQueryDebugger.queryAsString(traversal));
    }
    // A simple edge label matches a complex (vertex-qualified) label with the
    // same edge label, so the intersection keeps the complex form.
    @Test
    public void simpleEdgeLabelsShouldProvideIntersectionWithComplexEdgeLabels() {
        SpecifiedLabels specifiedSimpleEdgeLabels = new SpecifiedLabels(
                Arrays.asList(new Label("edgeLabel1"), new Label("edgeLabel2"), new Label("edgeLabel3")),
                EdgeLabelStrategy.edgeAndVertexLabels);
        List<Label> complexEdgeLabels = Arrays.asList(
                new Label("edgeLabel2", "fromLabel2", "toLabel2"),
                new Label("edgeLabel4", "fromLabel4", "toLabel4"));
        LabelsFilter newFilter = specifiedSimpleEdgeLabels.intersection(complexEdgeLabels);
        assertFalse(newFilter.isEmpty());
        assertEquals("edges with label(s) '(fromLabel2)-edgeLabel2-(toLabel2)'", newFilter.description("edges"));
    }
    // The reverse direction does not match: a complex label is not subsumed by
    // a simple one, so the intersection is empty.
    @Test
    public void complexEdgeLabelsShouldProvideEmptyIntersectionWithSimpleEdgeLabels() {
        SpecifiedLabels specifiedComplexEdgeLabels = new SpecifiedLabels(
                Arrays.asList(new Label("edgeLabel1", "fromLabel1", "toLabel1"),
                        new Label("edgeLabel2", "fromLabel2", "toLabel2"),
                        new Label("edgeLabel3", "fromLabel3", "toLabel3")),
                EdgeLabelStrategy.edgeAndVertexLabels);
        List<Label> simpleEdgeLabels = Arrays.asList(
                new Label("edgeLabel2"),
                new Label("edgeLabel4"));
        LabelsFilter newFilter = specifiedComplexEdgeLabels.intersection(simpleEdgeLabels);
        assertTrue(newFilter.isEmpty());
        assertEquals("edges with zero labels", newFilter.description("edges"));
    }
    // getLabelFor(PGResult) delegates to the underlying label strategy.
    @Test
    public void shouldGetSpecifiedLabelForPGResult() {
        LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Arrays.asList(new Label("label1"), new Label("label2")), labelStrategy);
        Map<String, Object> input = new HashMap<>();
        List<String> labels = Collections.singletonList("label1");
        input.put("~label", labels);
        PGResult pgResult = new ExportPGNodeResult(input);
        Label label = specifiedLabels.getLabelFor(pgResult);
        assertEquals(new Label(labels), label);
    }
    // getLabelFor(Map) likewise reads the "~label" entry via the strategy.
    @Test
    public void shouldGetSpecifiedLabelForInputMap() {
        LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
        SpecifiedLabels specifiedLabels = new SpecifiedLabels(
                Arrays.asList(new Label("label1"), new Label("label2")), labelStrategy);
        Map<String, Object> input = new HashMap<>();
        List<String> labels = Collections.singletonList("label1");
        input.put("~label", labels);
        Label label = specifiedLabels.getLabelFor(input);
        assertEquals(new Label(labels), label);
    }
}
| 4,108 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/RangeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Tests for {@link Range} sentinel semantics: (0, -1) means "all",
 * (-1, -1) means "empty", and sizeExceeds() compares the range size
 * against a supplied threshold.
 */
public class RangeTest {

    // (0, -1) is the sentinel for an unbounded range covering everything.
    @Test
    public void shouldIndicateThatRangeCoversAll() {
        assertTrue(new Range(0, -1).isAll());
        assertFalse(new Range(0, 1).isAll());
        assertFalse(new Range(-1, -1).isAll());
    }

    // (-1, -1) is the sentinel for an empty range.
    @Test
    public void shouldIndicateIfEmpty() {
        assertTrue(new Range(-1, -1).isEmpty());
        assertFalse(new Range(0, 1).isEmpty());
        assertFalse(new Range(0, -1).isEmpty());
    }

    // Renamed to fix typo ("Suuplied" -> "Supplied"). An "all" range exceeds
    // any threshold; an empty range exceeds none.
    @Test
    public void shouldIndicateIfSizeBiggerThanSuppliedValue() {
        assertTrue(new Range(0, -1).sizeExceeds(100));
        assertTrue(new Range(0, 200).sizeExceeds(100));
        assertFalse(new Range(0, 100).sizeExceeds(100));
        assertFalse(new Range(0, 100).sizeExceeds(200));
        assertFalse(new Range(-1, -1).sizeExceeds(1));
    }
}
| 4,109 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/AllLabelsTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.result.ExportPGNodeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
public class AllLabelsTest {
@Test
public void shouldGetLabelForPGResult() {
LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
AllLabels allLabels = new AllLabels(labelStrategy);
Map<String, Object> input = new HashMap<>();
List<String> labels = new ArrayList<>();
labels.add("TestLabel");
input.put("~label", labels);
PGResult pgResult = new ExportPGNodeResult(input);
Label label = allLabels.getLabelFor(pgResult);
assertEquals(new Label(labels), label);
}
@Test
public void shouldGetLabelForInputMap() {
LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
AllLabels allLabels = new AllLabels(labelStrategy);
Map<String, Object> input = new HashMap<>();
List<String> labels = new ArrayList<>();
labels.add("TestLabel");
input.put("~label", labels);
Label label = allLabels.getLabelFor(input);
assertEquals(new Label(labels), label);
}
@Test
public void shouldNotAddAnyLabelFiltersWhenApplied() {
AllLabels allLabels = new AllLabels(NodeLabelStrategy.nodeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
allLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.V()",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldNotAddAnyLabelFiltersWhenAppliedForEdges() {
AllLabels allLabels = new AllLabels(EdgeLabelStrategy.edgeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
allLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.E()",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void getPropertiesForLabelsTest() {
AllLabels allLabels = new AllLabels(NodeLabelStrategy.nodeLabelsOnly);
GraphElementSchemas graphElementSchemas = new GraphElementSchemas();
Label label = new Label("test");
Map<String, Object> properties = new HashMap<>();
properties.put("Test Prop int", 1);
properties.put("Test Prop String", "String");
properties.put("Test Prop Array", new int[]{1, 2});
graphElementSchemas.update(label, properties, false);
String[] propertyLabels = allLabels.getPropertiesForLabels(graphElementSchemas);
assertEquals(new String[]{"Test Prop String", "Test Prop Array", "Test Prop int"}, propertyLabels);
}
}
| 4,110 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/CsvPropertyGraphPrinterTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.PrintOutputWriter;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.junit.Test;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.*;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@link CsvPropertyGraphPrinter}: joining multi-valued properties
 * with a configurable separator, and escaping quotes/newlines so that printed
 * CSV survives a round trip through an RFC 4180 parser (as used by RewriteCSV).
 */
public class CsvPropertyGraphPrinterTest {
    // Multiple values of a multi-valued property are joined with the separator.
    @Test
    public void shouldUseSeparatorToSeparateMultipleValues() throws Exception {
        String separator = "|";
        StringWriter stringWriter = new StringWriter();
        PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
        LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
        labelSchema.put("property1", propertySchema1);
        HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
            put("property1", Arrays.asList("X", "Y"));
        }};
        CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
                new PrintOutputWriter("outputId", stringWriter),
                labelSchema,
                new PrinterOptions(CsvPrinterOptions.builder().setMultiValueSeparator(separator).build()));
        printer.printProperties(props);
        assertEquals(
                "\"X|Y\"",
                stringWriter.toString());
    }
    // A separator character occurring inside a value is backslash-escaped.
    @Test
    public void shouldEscapeSeparatorValuesInMultipleValues() throws Exception {
        String separator = "|";
        StringWriter stringWriter = new StringWriter();
        PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
        LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
        labelSchema.put("property1", propertySchema1);
        HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
            put("property1", Arrays.asList("A|B", "Y"));
        }};
        CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
                new PrintOutputWriter("outputId", stringWriter),
                labelSchema,
                new PrinterOptions(CsvPrinterOptions.builder().setMultiValueSeparator(separator).build()));
        printer.printProperties(props);
        assertEquals(
                "\"A\\|B|Y\"",
                stringWriter.toString());
    }
    // An empty separator concatenates values directly, with no escaping needed.
    @Test
    public void shouldUseEmptySeparatorToSeparateMultipleValues() throws Exception {
        String separator = "";
        StringWriter stringWriter = new StringWriter();
        PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
        LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
        labelSchema.put("property1", propertySchema1);
        HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
            put("property1", Arrays.asList("X;B", "Y"));
        }};
        CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
                new PrintOutputWriter("outputId", stringWriter),
                labelSchema,
                new PrinterOptions(CsvPrinterOptions.builder().setMultiValueSeparator(separator).build()));
        printer.printProperties(props);
        assertEquals(
                "\"X;BY\"",
                stringWriter.toString());
    }
    // Embedded double quotes are doubled per RFC 4180.
    @Test
    public void shouldEscapeTwoDoubleQuoteAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("{\"hobby\" : \"watching \"Flash\"\"}",
                "\"{\"\"hobby\"\" : \"\"watching \"\"Flash\"\"\"\"}\"",
                new PrinterOptions(CsvPrinterOptions.builder().build()));
    }
    @Test
    public void shouldEscapeThreeDoubleQuoteAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("{\"hobby\" : \"watching \"The \"Flash\"\"\"}",
                "\"{\"\"hobby\"\" : \"\"watching \"\"The \"\"Flash\"\"\"\"\"\"}\"",
                new PrinterOptions(CsvPrinterOptions.builder().build()));
    }
    // Commas inside a quoted field pass through unescaped.
    @Test
    public void shouldPrintCommaInStringWhenPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("{\"hobby\", \"watching \"The \"Flash\"\"}",
                "\"{\"\"hobby\"\", \"\"watching \"\"The \"\"Flash\"\"\"\"}\"",
                new PrinterOptions(CsvPrinterOptions.builder().build()));
    }
    // By default (escapeNewline unset) newlines are kept verbatim inside quotes.
    @Test
    public void shouldNotEscapeNewlineCharAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("A\nB", "\"A\nB\"",
                new PrinterOptions(CsvPrinterOptions.builder().build()));
    }
    // Platform line separators are normalized to \n in the output.
    @Test
    public void shouldNotEscapeNewlineAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("A" + System.lineSeparator() + "B", "\"A\nB\"",
                new PrinterOptions(CsvPrinterOptions.builder().build()));
    }
    // With escapeNewline=true, newlines are written as the literal two chars "\n".
    @Test
    public void shouldEscapeNewlineCharSetTrueAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("A\nB",
                "\"A\\nB\"",
                new PrinterOptions(CsvPrinterOptions.builder().setEscapeNewline(true).build()));
    }
    @Test
    public void shouldEscapeNewlineSetTrueAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("A" + System.lineSeparator() + "B",
                "\"A\\nB\"",
                new PrinterOptions(CsvPrinterOptions.builder().setEscapeNewline(true).build()));
    }
    @Test
    public void shouldNotEscapeNewlineCharsAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("A\n\nB", "\"A\n\nB\"",
                new PrinterOptions(CsvPrinterOptions.builder().build()));
    }
    @Test
    public void shouldEscapeNewlineCharsSetTrueAfterPrintPropertiesToCSVAndRewrite() throws Exception {
        testEscapeCharacterAfterPrintPropertiesAndRewrite("A\n\nB",
                "\"A\\n\\nB\"",
                new PrinterOptions(CsvPrinterOptions.builder().setEscapeNewline(true).build()));
    }
    // A set of tests to ensure that String escaping is done properly when CSVPropertyGraphPrinter prints to
    // a buffer, so when the buffer is read in by CSVFormat, the original property string is received.
    //
    // Round trip: print originalValue -> assert raw buffer equals expectedValue ->
    // parse buffer with CSVFormat -> assert the parsed value equals originalValue ->
    // re-format for rewrite and assert it equals expectedValue again.
    private void testEscapeCharacterAfterPrintPropertiesAndRewrite(String originalValue, String expectedValue, PrinterOptions printerOptions) throws IOException {
        StringWriter stringWriter = new StringWriter();
        PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, false, EnumSet.noneOf(DataType.class));
        LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
        labelSchema.put("property1", propertySchema1);
        HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
            put("property1", Collections.singletonList(originalValue));
        }};
        CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
                new PrintOutputWriter("outputId", stringWriter),
                labelSchema,
                printerOptions);
        printer.printProperties(props);
        // all double quotes should be escaped when printer prints
        assertEquals(expectedValue, stringWriter.toString());
        // using CSVFormat to read in printed items (same library used by RewriteCSV)
        String[] filePropertyHeaders = labelSchema.propertySchemas().stream()
                .map(p -> p.property().toString())
                .collect(Collectors.toList())
                .toArray(new String[]{});
        CSVFormat format = CSVFormat.RFC4180.builder().setHeader(filePropertyHeaders).build();
        Reader in = new StringReader(stringWriter.toString());
        Iterable<CSVRecord> records = format.parse(in);
        for (CSVRecord record : records) {
            // what CSVFormat read in from printed CSV should be the original value
            if (printerOptions.csv().escapeNewline()){
                // parsed record will contain escaped newline, to compare to original we have to unescape it
                assertEquals(originalValue, record.get("property1").replace("\\n", "\n"));
            } else {
                assertEquals(originalValue, record.get("property1"));
            }
            // double quotes should all be properly escaped again when we format for rewrite
            assertEquals(expectedValue, DataType.String.format(record.get("property1"), printerOptions.csv().escapeNewline()));
        }
    }
}
| 4,111 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/JsonPropertyGraphPrinterTest.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.PrintOutputWriter;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
import java.util.*;
import static com.amazonaws.services.neptune.util.MapUtils.entry;
import static com.amazonaws.services.neptune.util.MapUtils.map;
import static org.junit.Assert.*;
import static org.junit.Assert.assertTrue;
public class JsonPropertyGraphPrinterTest {
// An edge row is written as a single JSON object with ~id, ~label, ~from, ~to keys.
@Test
public void shouldPrintEdge() throws Exception {
    StringWriter stringWriter = new StringWriter();
    try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(
            new PrintOutputWriter("test", stringWriter),
            new LabelSchema(new Label("my-label")),
            PrinterOptions.NULL_OPTIONS)) {
        propertyGraphPrinter.printStartRow();
        propertyGraphPrinter.printEdge("edge-id", "edge-label", "from-id", "to-id");
        propertyGraphPrinter.printEndRow();
    }
    assertEquals(
            "{\"~id\":\"edge-id\",\"~label\":\"edge-label\",\"~from\":\"from-id\",\"~to\":\"to-id\"}",
            stringWriter.toString());
}
// An empty value list is always rendered as a JSON array, regardless of the
// property schema's multi-value flag.
@Test
public void shouldPrintEmptyListAsListIrrespectiveOfWhetherMultiValueIsTrue() throws Exception {
    StringWriter stringWriter = new StringWriter();
    // property1 is multi-valued, property2 is single-valued — output is identical.
    PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
    PropertySchema propertySchema2 = new PropertySchema("property2", false, DataType.String, false, EnumSet.noneOf(DataType.class));
    LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
    labelSchema.put("property1", propertySchema1);
    labelSchema.put("property2", propertySchema2);
    HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
        put("property1", new ArrayList<>());
        put("property2", new ArrayList<>());
    }};
    try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
        propertyGraphPrinter.printStartRow();
        propertyGraphPrinter.printProperties(props);
        propertyGraphPrinter.printEndRow();
    }
    assertEquals(
            "{\"property1\":[],\"property2\":[]}",
            stringWriter.toString());
}
// A one-element list for a single-valued property is unwrapped to a scalar.
@Test
public void shouldPrintSingleValueListAsSingleValueWhenIsMultiValueIsFalse() throws Exception {
    StringWriter stringWriter = new StringWriter();
    PropertySchema propertySchema = new PropertySchema("tags", false, DataType.String, false, EnumSet.noneOf(DataType.class));
    LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
    labelSchema.put("tags", propertySchema);
    HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
        put("tags", Collections.singletonList("tag1"));
    }};
    try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
        propertyGraphPrinter.printStartRow();
        propertyGraphPrinter.printProperties(props);
        propertyGraphPrinter.printEndRow();
    }
    assertEquals(
            "{\"tags\":\"tag1\"}",
            stringWriter.toString());
}
// With strictCardinality enabled, even a single value is kept as a JSON array.
@Test
public void shouldPrintSingleValueListAsSingleValueWhenIsMultiValueIsFalseButStrictCardinalityIsEnforced() throws Exception {
    StringWriter stringWriter = new StringWriter();
    PropertySchema propertySchema = new PropertySchema("tags", false, DataType.String, false, EnumSet.noneOf(DataType.class));
    LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
    labelSchema.put("tags", propertySchema);
    HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
        put("tags", Collections.singletonList("tag1"));
    }};
    PrinterOptions printerOptions = new PrinterOptions(JsonPrinterOptions.builder().setStrictCardinality(true).build());
    try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, printerOptions)) {
        propertyGraphPrinter.printStartRow();
        propertyGraphPrinter.printProperties(props);
        propertyGraphPrinter.printEndRow();
    }
    assertEquals(
            "{\"tags\":[\"tag1\"]}",
            stringWriter.toString());
}
// A one-element list for a multi-valued property stays a JSON array.
@Test
public void shouldPrintSingleValueListAsArrayWhenIsMultiValueIsTrue() throws Exception {
    StringWriter stringWriter = new StringWriter();
    PropertySchema propertySchema = new PropertySchema("tags", false, DataType.String, true, EnumSet.noneOf(DataType.class));
    LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
    labelSchema.put("tags", propertySchema);
    HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
        put("tags", Collections.singletonList("tag1"));
    }};
    try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
        propertyGraphPrinter.printStartRow();
        propertyGraphPrinter.printProperties(props);
        propertyGraphPrinter.printEndRow();
    }
    assertEquals(
            "{\"tags\":[\"tag1\"]}",
            stringWriter.toString());
}
// A multi-element list is always rendered as a JSON array, regardless of the
// schema's multi-value flag.
@Test
public void shouldPrintMultiValueListAsArrayIrrespectiveOfWhetherMultiValueIsTrue() throws Exception {
    StringWriter stringWriter = new StringWriter();
    PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
    PropertySchema propertySchema2 = new PropertySchema("property2", false, DataType.String, false, EnumSet.noneOf(DataType.class));
    LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
    labelSchema.put("property1", propertySchema1);
    labelSchema.put("property2", propertySchema2);
    HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
        put("property1", Arrays.asList("tag1", "tag2"));
        put("property2", Arrays.asList("tag1", "tag2"));
    }};
    try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
        propertyGraphPrinter.printStartRow();
        propertyGraphPrinter.printProperties(props);
        propertyGraphPrinter.printEndRow();
    }
    assertEquals(
            "{\"property1\":[\"tag1\",\"tag2\"],\"property2\":[\"tag1\",\"tag2\"]}",
            stringWriter.toString());
}
// When inferring the schema, each row's JSON object contains only the keys
// present in that row; new keys seen in later rows are simply appended.
// (print(...) is a test helper defined elsewhere in this class.)
@Test
public void appendsPreviouslyUnseenValuesToObjectWhenInferringSchema() throws IOException {
    StringWriter stringWriter = new StringWriter();
    LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
    PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
            new PrintOutputWriter("test", stringWriter),
            labelSchema,
            PrinterOptions.NULL_OPTIONS);
    print(printer,
            map(entry("fname", "fname1")),
            map(entry("fname", "fname2"), entry("lname", "lname2")),
            map(entry("fname", "fname3"), entry("age", 30)),
            map(entry("lname", "lname4"), entry("age", 40)),
            map(entry("fname", "fname5"), entry("lname", "lname5"), entry("age", 50))
    );
    String expectedOutput = "{\"fname\":\"fname1\"}\n" +
            "{\"fname\":\"fname2\",\"lname\":\"lname2\"}\n" +
            "{\"fname\":\"fname3\",\"age\":30}\n" +
            "{\"lname\":\"lname4\",\"age\":40}\n" +
            "{\"fname\":\"fname5\",\"lname\":\"lname5\",\"age\":50}";
    assertEquals(expectedOutput, stringWriter.toString());
}
@Test
public void updatesDataTypesInSchemaForColumnsWithEachNewRowWhenInferringSchema() throws IOException {
    // Feeding an int and then a String for "age" must widen its inferred type to String.
    // NOTE(review): unlike the CSV counterpart of this test, the inferred type of
    // "height" (int then double) is not asserted here — confirm whether that is intentional.
    StringWriter stringWriter = new StringWriter();
    LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
    PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
            new PrintOutputWriter("test", stringWriter),
            labelSchema,
            PrinterOptions.NULL_OPTIONS);
    print(printer,
            map(entry("age", 10)),
            map(entry("age", "ten"), entry("height", 5)),
            map(entry("age", 11), entry("height", 5.2))
    );
    assertEquals(2, labelSchema.propertyCount());
    assertEquals(DataType.String, labelSchema.getPropertySchema("age").dataType());
}
@Test
@Ignore
// Ignored: the expectation below keeps each value's original datatype per row
// (10 on row one, "ten" on row two) — presumably the JSON printer does not yet
// produce this once the schema has widened; confirm before re-enabling.
public void keepsOriginalDatatypesForPropertyValuesWhenWritingProperties() throws IOException {
    StringWriter stringWriter = new StringWriter();
    LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
    PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
            new PrintOutputWriter("test", stringWriter),
            labelSchema,
            PrinterOptions.NULL_OPTIONS);
    print(printer,
            map(entry("age", 10)),
            map(entry("age", "ten"), entry("height", 5)),
            map(entry("age", 11), entry("height", 5.2))
    );
    String expectedOutput = "{\"age\":10}\n" +
            "{\"age\":\"ten\",\"height\":5}\n" +
            "{\"age\":11,\"height\":5.2}";
    assertEquals(expectedOutput, stringWriter.toString());
}
@Test
public void columnsThatDoNotAppearInFirstRowAreNullable() throws IOException {
    // Properties present in every row stay non-nullable; properties that first
    // appear after row one must be flagged nullable in the inferred schema.
    StringWriter writer = new StringWriter();
    LabelSchema schema = new LabelSchema(new Label("my-label"));
    PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
            new PrintOutputWriter("test", writer), schema, PrinterOptions.NULL_OPTIONS);

    print(printer,
            map(entry("p-1", 10), entry("p-2", 20)),
            map(entry("p-1", 30), entry("p-2", 40), entry("p-3", 50)),
            map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80)));

    assertFalse(schema.getPropertySchema("p-1").isNullable());
    assertFalse(schema.getPropertySchema("p-2").isNullable());
    assertTrue(schema.getPropertySchema("p-3").isNullable());
    assertTrue(schema.getPropertySchema("p-4").isNullable());
}
@Test
public void columnsThatAppearInFirstRowButNotSubsequentRowsAreNullable() throws IOException {
    // A property seen in row one but missing from a later row (p-1 is absent in
    // row two) must be downgraded to nullable; only p-2, present in every row,
    // remains non-nullable.
    StringWriter stringWriter = new StringWriter();
    LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
    PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
            new PrintOutputWriter("test", stringWriter),
            labelSchema,
            PrinterOptions.NULL_OPTIONS);
    print(printer,
            map(entry("p-1", 10), entry("p-2", 20)),
            map(entry("p-2", 40), entry("p-3", 50)),
            map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
    );
    assertTrue(labelSchema.getPropertySchema("p-1").isNullable());
    assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
    assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
    assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
}
// Emits each map as one complete row: start marker, properties, end marker.
private void print(PropertyGraphPrinter printer, Map<?, ?>... rows) throws IOException {
    for (Map<?, ?> properties : rows) {
        printer.printStartRow();
        printer.printProperties(properties);
        printer.printEndRow();
    }
}
}
| 4,112 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/QueryTaskTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.Status;
import com.amazonaws.services.neptune.io.StatusOutputFormat;
import com.amazonaws.services.neptune.propertygraph.AllLabels;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.NamedQuery;
import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient;
import com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.driver.Result;
import org.apache.tinkerpop.gremlin.driver.ResultSet;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.io.IOException;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class QueryTaskTest {

    // In-memory "modern" sample graph backing all mocked query results.
    private final GraphTraversalSource gModern = TinkerFactory.createModern().traversal();

    @Test
    public void gDotEShouldOnlyCreateEdges() throws Exception {
        // A pure edge query (g.E()) must yield edge schemas only — no node schemas.
        QueryTask qt = createQueryTask(gModern.E().elementMap(), true);
        Map<GraphElementType, FileSpecificLabelSchemas> results = qt.call();
        FileSpecificLabelSchemas nodeSchemas = results.get(GraphElementType.nodes);
        FileSpecificLabelSchemas edgeSchemas = results.get(GraphElementType.edges);
        assertEquals(0, nodeSchemas.labels().size());
        assertEquals(2, edgeSchemas.labels().size());
        assertTrue(edgeSchemas.hasSchemasForLabel(new Label("knows")));
        assertTrue(edgeSchemas.hasSchemasForLabel(new Label("created")));
    }

    @Test
    public void gDotVShouldOnlyCreateNodes() throws Exception {
        // A pure vertex query (g.V()) must yield node schemas only — no edge schemas.
        QueryTask qt = createQueryTask(gModern.V().elementMap(), true);
        Map<GraphElementType, FileSpecificLabelSchemas> results = qt.call();
        FileSpecificLabelSchemas nodeSchemas = results.get(GraphElementType.nodes);
        FileSpecificLabelSchemas edgeSchemas = results.get(GraphElementType.edges);
        assertEquals(2, nodeSchemas.labels().size());
        assertEquals(0, edgeSchemas.labels().size());
        assertTrue(nodeSchemas.hasSchemasForLabel(new Label("person")));
        assertTrue(nodeSchemas.hasSchemasForLabel(new Label("software")));
    }

    @Test
    public void shouldSeparateEdgesAndNodes() throws Exception {
        // A mixed query returning both vertices and edges must route each element
        // type to the corresponding schema collection.
        QueryTask qt = createQueryTask(gModern.V().union(__.hasLabel("person"), __.outE().hasLabel("created")).elementMap(), true);
        Map<GraphElementType, FileSpecificLabelSchemas> results = qt.call();
        FileSpecificLabelSchemas nodeSchemas = results.get(GraphElementType.nodes);
        FileSpecificLabelSchemas edgeSchemas = results.get(GraphElementType.edges);
        assertEquals(1, nodeSchemas.labels().size());
        assertEquals(1, edgeSchemas.labels().size());
        assertTrue(nodeSchemas.hasSchemasForLabel(new Label("person")));
        assertTrue(edgeSchemas.hasSchemasForLabel(new Label("created")));
    }

    // Returns a client whose submit(...) streams the given traversal's output,
    // so QueryTask runs against the in-memory graph instead of a server.
    private NeptuneGremlinClient.QueryClient getMockClient(GraphTraversal traversal) {
        NeptuneGremlinClient.QueryClient mockClient = mock(NeptuneGremlinClient.QueryClient.class);
        ResultSet results = mock(ResultSet.class);
        when(results.stream()).thenReturn(traversal.toStream().map(r -> new Result(r)));
        when(mockClient.submit(any(), any())).thenReturn(results);
        return mockClient;
    }

    // Builds a QueryTask over a single mocked query, with printer creation stubbed
    // out so no files are written; only the returned schemas are inspected.
    private QueryTask createQueryTask(GraphTraversal traversal, boolean structuredOutput) throws IOException {
        Queue<NamedQuery> mockQueries = new LinkedList<>();
        mockQueries.add(mock(NamedQuery.class));
        PropertyGraphTargetConfig targetConfig = mock(PropertyGraphTargetConfig.class);
        when(targetConfig.createPrinterForEdges(any(), any())).thenReturn(mock(PropertyGraphPrinter.class));
        when(targetConfig.createPrinterForNodes(any(), any())).thenReturn(mock(PropertyGraphPrinter.class));
        return new QueryTask(mockQueries,
                getMockClient(traversal),
                targetConfig,
                false,
                10000L,
                new Status(StatusOutputFormat.Description, "query results test"),
                new AtomicInteger(),
                structuredOutput,
                new AllLabels(NodeLabelStrategy.nodeLabelsOnly),
                new AllLabels(EdgeLabelStrategy.edgeLabelsOnly)
        );
    }
}
| 4,113 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/EdgeWriterTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.AllLabels;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.EdgesClient;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.GremlinFilters;
import com.amazonaws.services.neptune.propertygraph.LabelStrategy;
import com.amazonaws.services.neptune.propertygraph.LabelsFilter;
import com.amazonaws.services.neptune.propertygraph.Range;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.io.result.QueriesEdgeResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class EdgeWriterTest {

    private EdgesClient client;
    private GraphTraversalSource gModern;

    @Before
    public void setup() {
        // Back the EdgesClient with the in-memory "modern" sample graph; stats are
        // stubbed and all feature toggles report disabled so the client takes its
        // default code paths.
        gModern = TinkerFactory.createModern().traversal();
        ExportStats mockStats = mock(ExportStats.class);
        FeatureToggles mockFeatures = mock(FeatureToggles.class);
        when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);
        client = new EdgesClient(gModern, false, mockStats, mockFeatures);
    }

    @Test
    public void shouldHandlePGEdgeResultWithEdgeLabelsOnly() throws IOException {
        // With edgeLabelsOnly, the printed edge carries no endpoint vertex labels.
        PGEdgeResult edgeResult = getPGEdgeResult("7", new AllLabels(EdgeLabelStrategy.edgeLabelsOnly));
        PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
        EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeLabelsOnly.getLabelFor(edgeResult));
        edgeWriter.handle(edgeResult, true);
        String expected = "Start Row\n" +
                "Edge[7, knows, 1, 2] Properties{weight:0.5, } \n";
        assertEquals(expected, pgPrinter.getOutput());
    }

    @Test
    public void shouldHandlePGEdgeResultWithEdgeAndVertexLabels() throws IOException {
        // With edgeAndVertexLabels, from/to vertex labels are included in the output.
        PGEdgeResult edgeResult = getPGEdgeResult("7", new AllLabels(EdgeLabelStrategy.edgeAndVertexLabels));
        PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
        EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeAndVertexLabels.getLabelFor(edgeResult));
        edgeWriter.handle(edgeResult, true);
        String expected = "Start Row\n" +
                "Edge[7, knows, 1, 2, fromLabels{person, }, toLabels{person, }] Properties{weight:0.5, } \n";
        assertEquals(expected, pgPrinter.getOutput());
    }

    @Test
    public void shouldHandleQueriesEdgeResultWithEdgeLabelsOnly() throws IOException {
        // Same expectation as the PGEdgeResult case, but for the queries result type.
        QueriesEdgeResult edgeResult = getQueriesEdgeResult("7");
        PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
        EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeLabelsOnly.getLabelFor(edgeResult));
        edgeWriter.handle(edgeResult, true);
        String expected = "Start Row\n" +
                "Edge[7, knows, 1, 2] Properties{weight:0.5, } \n";
        assertEquals(expected, pgPrinter.getOutput());
    }

    @Test
    public void shouldHandleQueriesEdgeResultWithEdgeAndVertexLabels() throws IOException {
        QueriesEdgeResult edgeResult = getQueriesEdgeResult("7");
        PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
        EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeAndVertexLabels.getLabelFor(edgeResult));
        edgeWriter.handle(edgeResult, true);
        String expected = "Start Row\n" +
                "Edge[7, knows, 1, 2, fromLabels{person, }, toLabels{person, }] Properties{weight:0.5, } \n";
        assertEquals(expected, pgPrinter.getOutput());
    }

    // Scans all edges through the client and captures the one whose id matches;
    // the single-element array works around the effectively-final restriction.
    private PGEdgeResult getPGEdgeResult(String id, LabelsFilter labelsFilter) {
        final PGEdgeResult[] result = {null};
        GraphElementHandler<PGResult> handler = new GraphElementHandler<PGResult>() {
            @Override
            public void handle(PGResult element, boolean allowTokens) throws IOException {
                if(element.getId().equals(id)) {
                    result[0] = (PGEdgeResult) element;
                }
            }
            @Override
            public void close() throws Exception {}
        };
        client.queryForValues(handler, Range.ALL, labelsFilter,
                GremlinFilters.EMPTY, new GraphElementSchemas());
        return result[0];
    }

    // Builds a QueriesEdgeResult directly from the edge's elementMap.
    private QueriesEdgeResult getQueriesEdgeResult(String id) {
        return new QueriesEdgeResult(gModern.E(id).elementMap().next());
    }
}
| 4,114 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/VariableRowCsvPropertyGraphPrinterTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.PrintOutputWriter;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;
import static com.amazonaws.services.neptune.util.MapUtils.entry;
import static com.amazonaws.services.neptune.util.MapUtils.map;
import static org.junit.Assert.*;
public class VariableRowCsvPropertyGraphPrinterTest {

    private final PrinterOptions printerOptions = PrinterOptions.NULL_OPTIONS;

    @Test
    public void appendsPreviouslyUnseenColumnsToEndOfRow() throws IOException {
        // Columns discovered in later rows are appended to the right of the row;
        // rows missing a known column emit an empty cell for it.
        StringWriter stringWriter = new StringWriter();
        LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
        VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
                new PrintOutputWriter("test", stringWriter),
                labelSchema,
                printerOptions);
        print(printer,
                map(entry("fname", "fname1")),
                map(entry("fname", "fname2"), entry("lname", "lname2")),
                map(entry("fname", "fname3"), entry("age", 30)),
                map(entry("lname", "lname4"), entry("age", 40)),
                map(entry("fname", "fname5"), entry("lname", "lname5"), entry("age", 50))
        );
        String expectedOutput = "\"fname1\"\n" +
                "\"fname2\",\"lname2\"\n" +
                "\"fname3\",,30\n" +
                ",\"lname4\",40\n" +
                "\"fname5\",\"lname5\",50\n";
        assertEquals(expectedOutput, stringWriter.toString());
    }

    @Test
    public void updatesDataTypesForColumnsWithEachNewRow() throws IOException {
        // Mixed value types widen the inferred column type: int then String for
        // "age" gives String; int then double for "height" gives Double.
        StringWriter stringWriter = new StringWriter();
        LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
        VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
                new PrintOutputWriter("test", stringWriter),
                labelSchema,
                printerOptions);
        print(printer,
                map(entry("age", 10)),
                map(entry("age", "ten"), entry("height", 5)),
                map(entry("age", 11), entry("height", 5.2))
        );
        assertEquals(2, labelSchema.propertyCount());
        assertEquals(DataType.String, labelSchema.getPropertySchema("age").dataType());
        assertEquals(DataType.Double, labelSchema.getPropertySchema("height").dataType());
    }

    @Test
    public void columnsThatDoNotAppearInFirstRowAreNullable() throws IOException {
        // Columns present in every row stay non-nullable; those introduced after
        // the first row become nullable.
        StringWriter stringWriter = new StringWriter();
        LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
        VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
                new PrintOutputWriter("test", stringWriter),
                labelSchema,
                printerOptions);
        print(printer,
                map(entry("p-1", 10), entry("p-2", 20)),
                map(entry("p-1", 30), entry("p-2", 40), entry("p-3", 50)),
                map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
        );
        assertFalse(labelSchema.getPropertySchema("p-1").isNullable());
        assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
        assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
        assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
    }

    @Test
    public void columnsThatAppearInFirstRowButNotSubsequentRowsAreNullable() throws IOException {
        // A column seen in row one but absent from a later row (p-1 missing in
        // row two) must be downgraded to nullable.
        StringWriter stringWriter = new StringWriter();
        LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
        VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
                new PrintOutputWriter("test", stringWriter),
                labelSchema,
                printerOptions);
        print(printer,
                map(entry("p-1", 10), entry("p-2", 20)),
                map(entry("p-2", 40), entry("p-3", 50)),
                map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
        );
        assertTrue(labelSchema.getPropertySchema("p-1").isNullable());
        assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
        assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
        assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
    }

    // Emits each map as one complete row: start marker, properties, end marker.
    private void print(PropertyGraphPrinter printer, Map<?, ?>... rows) throws IOException {
        for (Map<?, ?> row : rows) {
            printer.printStartRow();
            printer.printProperties(row);
            printer.printEndRow();
        }
    }
}
| 4,115 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/NodeWriterTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.AllLabels;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.EdgesClient;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.GremlinFilters;
import com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.NodesClient;
import com.amazonaws.services.neptune.propertygraph.Range;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NodeWriterTest {

    private NodesClient client;

    @Before
    public void setup() {
        // Build a NodesClient over the in-memory "modern" sample graph with
        // stubbed stats and every feature toggle reporting disabled.
        GraphTraversalSource g = TinkerFactory.createModern().traversal();
        ExportStats stats = mock(ExportStats.class);
        FeatureToggles toggles = mock(FeatureToggles.class);
        when(toggles.containsFeature(Mockito.any())).thenReturn(false);
        client = new NodesClient(g, false, stats, toggles);
    }

    @Test
    public void testHandle() throws IOException {
        // Writing vertex 1 must emit its label plus both of its properties.
        PGResult node = getPGNodeResult("1");
        PropertyGraphStringPrinter printer = new PropertyGraphStringPrinter();
        new NodeWriter(printer).handle(node, true);
        assertEquals(
                "Start Row\n" +
                        "Node[1, Labels{person, }] Properties{name:[marko], age:[29], } \n",
                printer.getOutput());
    }

    // Scans all nodes through the client and captures the one whose id matches.
    private PGResult getPGNodeResult(String id) {
        final PGResult[] captured = {null};
        GraphElementHandler<PGResult> handler = new GraphElementHandler<PGResult>() {
            @Override
            public void handle(PGResult element, boolean allowTokens) throws IOException {
                if (element.getId().equals(id)) {
                    captured[0] = element;
                }
            }

            @Override
            public void close() throws Exception {
            }
        };
        client.queryForValues(handler, Range.ALL, new AllLabels(NodeLabelStrategy.nodeLabelsOnly),
                GremlinFilters.EMPTY, new GraphElementSchemas());
        return captured[0];
    }
}
| 4,116 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/PropertyGraphStringPrinter.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import com.amazonaws.services.neptune.util.NotImplementedException;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
// Test double that records every print call as text so assertions can be made
// against the final output string. Header methods are unsupported.
class PropertyGraphStringPrinter implements PropertyGraphPrinter {

    // Accumulates everything "printed" during a test.
    StringBuilder output = new StringBuilder();

    public String getOutput() {
        return output.toString();
    }

    @Override
    public String outputId() {
        return null;
    }

    @Override
    public void printHeaderMandatoryColumns(String... columns) {
        throw new NotImplementedException();
    }

    @Override
    public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) {
        throw new NotImplementedException();
    }

    @Override
    public void printProperties(Map<?, ?> properties) throws IOException {
        // Rendered as: Properties{key:value, key:value, } followed by a space.
        output.append("Properties{");
        for (Map.Entry<?, ?> entry : properties.entrySet()) {
            output.append(entry.getKey().toString()).append(":").append(entry.getValue().toString()).append(", ");
        }
        output.append("} ");
    }

    @Override
    public void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException {
        printProperties(properties);
    }

    @Override
    public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException {
        printProperties(properties);
    }

    @Override
    public void printEdge(String id, String label, String from, String to) throws IOException {
        output.append(String.format("Edge[%s, %s, %s, %s] ", id, label, from, to));
    }

    @Override
    public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException {
        output.append(String.format("Edge[%s, %s, %s, %s, fromLabels{", id, label, from, to));
        appendLabels(fromLabels);
        output.append("}, toLabels{");
        appendLabels(toLabels);
        output.append("}] ");
    }

    @Override
    public void printNode(String id, List<String> labels) throws IOException {
        output.append(String.format("Node[%s, Labels{", id));
        appendLabels(labels);
        output.append("}] ");
    }

    @Override
    public void printStartRow() throws IOException {
        output.append("Start Row\n");
    }

    @Override
    public void printEndRow() throws IOException {
        output.append("\n");
    }

    @Override
    public void close() throws Exception {
    }

    // Appends each label followed by ", " — matching the row format above.
    private void appendLabels(Collection<String> labels) {
        for (String label : labels) {
            output.append(label).append(", ");
        }
    }
}
| 4,117 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/QueriesEdgeResultTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.propertygraph.io.result.QueriesEdgeResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
public class QueriesEdgeResultTest {

    private final GraphTraversalSource gModern = TinkerFactory.createModern().traversal();

    /**
     * Wraps TinkerPop's modern graph g.E(9) in a QueriesEdgeResult and asserts that
     * id, label, properties, endpoints and endpoint labels are surfaced correctly.
     */
    @Test
    public void testStandardEdgeElementMap() {
        QueriesEdgeResult modernE9 = new QueriesEdgeResult(gModern.E("9").elementMap().next());
        assertEquals(GraphElementType.edges, modernE9.getGraphElementType());
        assertEquals(Collections.singletonList("created"), modernE9.getLabel());
        assertEquals("9", modernE9.getId());
        Map<String, Object> properties = new HashMap<>();
        properties.put("weight", 0.4);
        assertEquals(properties, modernE9.getProperties());
        assertEquals("1", modernE9.getFrom());
        assertEquals("3", modernE9.getTo());
        assertEquals(Collections.singletonList("person"), modernE9.getFromLabels());
        assertEquals(Collections.singletonList("software"), modernE9.getToLabels());
    }

    @Test
    public void testEdgeWithNoProperties() {
        // elementMap() yields Map<Object, Object>; use the parameterized type
        // instead of a raw Map so the removal below is type-checked.
        Map<Object, Object> e9 = gModern.E("9").elementMap().next();
        e9.remove("weight");
        QueriesEdgeResult queriesEdgeResult = new QueriesEdgeResult(e9);
        assertEquals(new HashMap<String, Object>(), queriesEdgeResult.getProperties());
    }
}
| 4,118 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/QueriesNodeResultTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.propertygraph.io.result.QueriesNodeResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
public class QueriesNodeResultTest {

    private final GraphTraversalSource gModern = TinkerFactory.createModern().traversal();

    /**
     * Wraps TinkerPop's modern graph g.V(1) in a QueriesNodeResult and asserts that
     * id, label and properties are surfaced, while edge-only accessors throw.
     */
    @Test
    public void testStandardNodeElementMap() {
        QueriesNodeResult modernV1 = new QueriesNodeResult(gModern.V("1").elementMap().next());
        assertEquals(GraphElementType.nodes, modernV1.getGraphElementType());
        assertEquals(Collections.singletonList("person"), modernV1.getLabel());
        assertEquals("1", modernV1.getId());
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "marko");
        properties.put("age", 29);
        assertEquals(properties, modernV1.getProperties());
        // Edge accessors are meaningless for a node and must fail fast.
        assertThrows(IllegalStateException.class, () -> {modernV1.getFrom();});
        assertThrows(IllegalStateException.class, () -> {modernV1.getTo();});
        assertThrows(IllegalStateException.class, () -> {modernV1.getFromLabels();});
        assertThrows(IllegalStateException.class, () -> {modernV1.getToLabels();});
    }

    @Test
    public void testNodeWithNoProperties() {
        // elementMap() yields Map<Object, Object>; use the parameterized type
        // instead of a raw Map so the removals below are type-checked.
        Map<Object, Object> v1 = gModern.V("1").elementMap().next();
        v1.remove("name");
        v1.remove("age");
        QueriesNodeResult queriesNodeResult = new QueriesNodeResult(v1);
        assertEquals(new HashMap<String, Object>(), queriesNodeResult.getProperties());
    }
}
| 4,119 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/DataTypeTest.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Date;
import static org.junit.Assert.assertEquals;
public class DataTypeTest {
@Test
public void emptyStringDateValueShouldReturnEmptyString() {
    // An empty date value passes through unchanged rather than failing to parse.
    assertEquals("", DataType.Date.format(""));
}
@Test
public void shouldEscapeDoubleQuotes() {
    // Embedded quotes are doubled and the whole value is wrapped in quotes.
    assertEquals("\"One \"\"two\"\" three\"", DataType.String.format("One \"two\" three"));
}
@Test
public void shouldEscapeTwoDoubleQuotes() {
    // Each quote is doubled independently, so pairs become four quotes.
    assertEquals("\"One \"\"\"\"two\"\"\"\" three\"", DataType.String.format("One \"\"two\"\" three"));
}
@Test
public void shouldEscapeThreeDoubleQuotes() {
    // Runs of three quotes become six — every quote is doubled.
    assertEquals("\"One \"\"\"\"\"\"two\"\"\"\"\"\" three\"", DataType.String.format("One \"\"\"two\"\"\" three"));
}
@Test
public void shouldRoundTripDate() {
    // format(...) followed by convert(...) must reproduce the original Date exactly.
    Date now = new Date();
    DataType dateType = DataType.dataTypeFor(now.getClass());
    assertEquals(now, dateType.convert(dateType.format(now)));
}
@Test
public void shouldRoundTripDateWhenCallingFormatWithEscapeNewlineParam() {
    // The two-argument format overload must round-trip dates just like the default one.
    Date now = new Date();
    DataType dateType = DataType.dataTypeFor(now.getClass());
    assertEquals(now, dateType.convert(dateType.format(now, false)));
}
@Test
public void shouldNotEscapeNewlineChar(){
    // By default a newline is preserved verbatim inside the quoted value.
    assertEquals("\"A\nB\"", DataType.String.format("A\nB"));
}
@Test
public void shouldNotEscapeNewline(){
    // NOTE(review): the expected value contains '\n' while the input uses the
    // platform separator — presumably format() normalises line endings (or the
    // suite only runs where the separator is '\n'); confirm on Windows.
    assertEquals("\"A\nB\"", DataType.String.format("A" + System.lineSeparator() + "B"));
}
@Test
public void shouldEscapeNewlineCharIfEscapeNewlineSetToTrue(){
    // With escapeNewline=true the newline is emitted as the two characters \n.
    assertEquals("\"A\\nB\"", DataType.String.format("A\nB", true));
}
@Test
public void shouldEscapeNewlineIfEscapeNewlineSetToTrue(){
    // The platform line separator is likewise escaped to the literal \n sequence.
    assertEquals("\"A\\nB\"", DataType.String.format("A" + System.lineSeparator() + "B", true));
}
@Test
public void doubleShouldWriteIntAsDouble() throws IOException {
    // An int handed to the Double type is widened in the JSON output (0 -> 0.0).
    assertEquals("[0.0]", createJsonArray(g -> DataType.Double.printTo(g, 0)));
    assertEquals("{\"value\":0.0}", createJsonObject(g -> DataType.Double.printTo(g, "value", 0)));
}
@Test
public void longShouldWriteIntAsLong() throws IOException {
    // Long keeps an integral representation in the JSON output.
    assertEquals("[0]", createJsonArray(g -> DataType.Long.printTo(g, 0)));
    assertEquals("{\"value\":0}", createJsonObject(g -> DataType.Long.printTo(g, "value", 0)));
}
@Test
public void floatShouldWriteIntAsFloat() throws IOException {
    // An int handed to the Float type is widened in the JSON output (0 -> 0.0).
    assertEquals("[0.0]", createJsonArray(g -> DataType.Float.printTo(g, 0)));
    assertEquals("{\"value\":0.0}", createJsonObject(g -> DataType.Float.printTo(g, "value", 0)));
}
@Test
public void shortShouldWriteIntAsShort() throws IOException {
    // An int written through the Short type stays an integral JSON number.
    String arrayJson = createJsonArray(generator -> DataType.Short.printTo(generator, 0));
    String objectJson = createJsonObject(generator -> DataType.Short.printTo(generator, "value", 0));
    assertEquals("[0]", arrayJson);
    assertEquals("{\"value\":0}", objectJson);
}
@Test
public void byteShouldWriteIntAsByte() throws IOException {
    // An int written through the Byte type stays an integral JSON number.
    String arrayJson = createJsonArray(generator -> DataType.Byte.printTo(generator, 0));
    String objectJson = createJsonObject(generator -> DataType.Byte.printTo(generator, "value", 0));
    assertEquals("[0]", arrayJson);
    assertEquals("{\"value\":0}", objectJson);
}
@Test
public void boolShouldWriteIntAsBool() throws IOException {
    // The int 0 written through the Boolean type is serialized as JSON false.
    String arrayJson = createJsonArray(generator -> DataType.Boolean.printTo(generator, 0));
    String objectJson = createJsonObject(generator -> DataType.Boolean.printTo(generator, "value", 0));
    assertEquals("[false]", arrayJson);
    assertEquals("{\"value\":false}", objectJson);
}
/**
 * Runs the callback inside a JSON array context and returns the serialized text.
 */
private String createJsonArray(UseDataType useDataType) throws IOException {
    StringWriter out = new StringWriter();
    JsonGenerator generator = new JsonFactory().createGenerator(out);
    generator.writeStartArray();
    useDataType.apply(generator);
    generator.writeEndArray();
    generator.flush(); // push buffered output into the writer before reading it
    return out.toString();
}
/**
 * Runs the callback inside a JSON object context and returns the serialized text.
 */
private String createJsonObject(UseDataType useDataType) throws IOException {
    StringWriter out = new StringWriter();
    JsonGenerator generator = new JsonFactory().createGenerator(out);
    generator.writeStartObject();
    useDataType.apply(generator);
    generator.writeEndObject();
    generator.flush(); // push buffered output into the writer before reading it
    return out.toString();
}
/**
 * Callback that writes a value to the supplied JSON generator; implemented by
 * lambdas in the tests to exercise the DataType.printTo overloads.
 */
@FunctionalInterface
private interface UseDataType{
    void apply(JsonGenerator generator) throws IOException;
}
}
| 4,120 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/LabelSchemaTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.junit.Test;
import java.util.EnumSet;
import static org.junit.Assert.*;
/**
 * Tests for {@link LabelSchema}: union semantics (widening data types, adding new
 * properties) and the {@code isSameAs} structural-equality check.
 */
public class LabelSchemaTest {

    /** Shorthand for a PropertySchema with no alternative data types. */
    private static PropertySchema schema(String name, boolean isNullable, DataType dataType, boolean isMultiValue) {
        return new PropertySchema(name, isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class));
    }

    @Test
    public void unioningShouldUpdateDataTypesOfExistingProperties(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        labelSchema1.put("p3", schema("p3", false, DataType.Double, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p1", schema("p1", false, DataType.Double, false));
        labelSchema2.put("p2", schema("p2", false, DataType.Integer, true));
        labelSchema2.put("p3", schema("p3", false, DataType.Integer, false));
        LabelSchema result = labelSchema1.union(labelSchema2);
        // assertEquals(expected, actual): expected first (the original had the
        // arguments reversed, which would garble any failure message).
        assertEquals(schema("p1", false, DataType.Double, false), result.getPropertySchema("p1"));
        assertEquals(schema("p2", false, DataType.Integer, true), result.getPropertySchema("p2"));
        assertEquals(schema("p3", false, DataType.Double, false), result.getPropertySchema("p3"));
    }

    @Test
    public void unioningShouldAddNewProperties(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        labelSchema1.put("p3", schema("p3", false, DataType.Double, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p4", schema("p4", false, DataType.String, false));
        labelSchema2.put("p5", schema("p5", false, DataType.Integer, true));
        LabelSchema result = labelSchema1.union(labelSchema2);
        // Union of disjoint property sets keeps all five properties.
        assertEquals(5, result.propertySchemas().size());
        assertEquals(schema("p4", false, DataType.String, false), result.getPropertySchema("p4"));
        assertEquals(schema("p5", false, DataType.Integer, true), result.getPropertySchema("p5"));
    }

    @Test
    public void schemasWithSameLabelAndPropertySchemasAreSame(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema2.put("p2", schema("p2", false, DataType.Integer, false));
        assertTrue(labelSchema1.isSameAs(labelSchema2));
    }

    @Test
    public void schemasWithDifferentLabelsAreNotSame(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("this-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("that-label"));
        labelSchema2.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema2.put("p2", schema("p2", false, DataType.Integer, false));
        assertFalse(labelSchema1.isSameAs(labelSchema2));
    }

    @Test
    public void schemasWithDifferentPropertiesAreNotSame(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p1", schema("p1", false, DataType.Double, true));
        assertFalse(labelSchema1.isSameAs(labelSchema2));
    }

    @Test
    public void schemasWithDifferentNumberOfPropertiesAreNotSame(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        // A superset of properties...
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema2.put("p2", schema("p2", false, DataType.Integer, false));
        labelSchema2.put("p3", schema("p3", false, DataType.Integer, false));
        // ...and a subset must both compare as different.
        LabelSchema labelSchema3 = new LabelSchema(new Label("my-label"));
        labelSchema3.put("p1", schema("p1", false, DataType.Integer, false));
        assertFalse(labelSchema1.isSameAs(labelSchema2));
        assertFalse(labelSchema1.isSameAs(labelSchema3));
    }

    @Test
    public void schemasWithPropertySchemasInDifferentOrderAreNotSame(){
        // isSameAs is order-sensitive: identical properties inserted in a
        // different order are not considered the same schema.
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p2", schema("p2", false, DataType.Integer, false));
        labelSchema2.put("p1", schema("p1", false, DataType.Integer, false));
        assertFalse(labelSchema1.isSameAs(labelSchema2));
    }

    @Test
    public void schemasWithPropertiesWithDifferentNullableCharacteristicsAreNotSame(){
        LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
        labelSchema1.put("p1", schema("p1", true, DataType.Integer, false));
        labelSchema1.put("p2", schema("p2", false, DataType.Integer, false));
        LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
        labelSchema2.put("p1", schema("p1", false, DataType.Integer, false));
        labelSchema2.put("p2", schema("p2", false, DataType.Integer, false));
        assertFalse(labelSchema1.isSameAs(labelSchema2));
    }
}
| 4,121 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/GraphElementSchemasTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.junit.Test;
import static com.amazonaws.services.neptune.util.MapUtils.entry;
import static com.amazonaws.services.neptune.util.MapUtils.map;
import static org.junit.Assert.*;
/**
 * Tests for {@link GraphElementSchemas#createCopy()}.
 */
public class GraphElementSchemasTest {
    @Test
    public void canCreateCopyOfSelf(){
        GraphElementSchemas source = new GraphElementSchemas();
        source.update(new Label("label1"), map(entry("fname", "fname-1")), false);
        source.update(new Label("label1"), map(entry("lname", "lname-1")), false);
        source.update(new Label("label2"), map(entry("fname", "fname-2"), entry("lname", "lname-2")), false);
        GraphElementSchemas duplicate = source.createCopy();
        // The copy must be content-equal (same JSON rendering)...
        assertEquals(source.toJson(), duplicate.toJson());
        // ...but a distinct object, not an alias of the original.
        assertNotEquals(source, duplicate);
    }
}
| 4,122 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/PropertySchemaTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import org.junit.Test;
import java.util.EnumSet;
import static org.junit.Assert.*;
/**
 * Tests for {@link PropertySchema}: union widening of the multi-value and
 * nullable flags, and escaping of colons in property names.
 */
public class PropertySchemaTest {
    @Test
    public void revisionWhereAtLeastOneSchemaIsMultiValueShouldResultInMultiValue(){
        PropertySchema singleValue = new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class));
        PropertySchema multiValue = new PropertySchema("p1", false, DataType.Integer, true, EnumSet.noneOf(DataType.class));
        // The multi-value flag widens symmetrically, whichever side carries it.
        assertTrue(singleValue.union(multiValue).isMultiValue());
        assertTrue(multiValue.union(singleValue).isMultiValue());
    }
    @Test
    public void revisionWhereAtLeastOneSchemaIsNullableShouldResultInNullable(){
        PropertySchema nonNullable = new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class));
        PropertySchema nullable = new PropertySchema("p1", true, DataType.Integer, false, EnumSet.noneOf(DataType.class));
        // The nullable flag widens symmetrically, whichever side carries it.
        assertTrue(nonNullable.union(nullable).isNullable());
        assertTrue(nullable.union(nonNullable).isNullable());
    }
    @Test
    public void shouldEscapePropertyNameContainingColons(){
        PropertySchema colonName = new PropertySchema("p1:a:b:c", false, DataType.Integer, false, EnumSet.noneOf(DataType.class));
        // Escaped forms: each ':' in the property name becomes '\:'.
        assertEquals("p1\\:a\\:b\\:c:int", colonName.nameWithDataType(true));
        assertEquals("p1\\:a\\:b\\:c", colonName.nameWithoutDataType(true));
        // Unescaped forms leave the name untouched.
        assertEquals("p1:a:b:c:int", colonName.nameWithDataType());
        assertEquals("p1:a:b:c", colonName.nameWithoutDataType());
    }
}
| 4,123 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/incremental_export/StreamRecordsNotFoundExceptionParserTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.incremental_export;
import com.amazonaws.services.neptune.cluster.EventId;
import com.amazonaws.services.neptune.cluster.StreamRecordsNotFoundExceptionParser;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Tests for StreamRecordsNotFoundExceptionParser.parseLastEventId, which extracts
 * the last valid [commitNum, opNum] pair from a Neptune Streams error message.
 *
 * Method names fixed to lowerCamelCase (originally UpperCamelCase, violating Java
 * naming conventions); JUnit discovers tests via @Test, so renaming is safe.
 */
public class StreamRecordsNotFoundExceptionParserTest {
    @Test
    public void shouldParseCommitNumAndOpNum(){
        String errorMessage = "Requested startEventId is from the future. Last valid eventId is [commitNum = 1132, opNum = 200]";
        EventId lastEventId = StreamRecordsNotFoundExceptionParser.parseLastEventId(errorMessage);
        assertEquals(1132, lastEventId.commitNum());
        assertEquals(200, lastEventId.opNum());
    }
    @Test
    public void shouldReturnMinus1IfNotFound(){
        // When the message carries no eventId, the parser falls back to (-1, -1).
        String errorMessage = "Requested startEventId is from the future";
        EventId lastEventId = StreamRecordsNotFoundExceptionParser.parseLastEventId(errorMessage);
        assertEquals(-1, lastEventId.commitNum());
        assertEquals(-1, lastEventId.opNum());
    }
}
| 4,124 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/JsonFromResource.java
|
package com.amazonaws.services.neptune.profiles.neptune_ml;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
/**
 * Loads a JSON fixture from test resources. Fixtures live in a directory named
 * after the test class: {@code <simple-class-name>/<filename>}.
 */
public class JsonFromResource {
    /**
     * Parses the named resource into a Jackson tree.
     *
     * @param filename  fixture file name within the test class's resource directory
     * @param testClass test class whose simple name and class loader locate the fixture
     * @return the parsed JSON tree
     * @throws IOException if the resource cannot be read or parsed
     * @throws NullPointerException if the resource does not exist
     */
    public static JsonNode get(String filename, Class<?> testClass) throws IOException {
        String path = String.format("%s/%s", testClass.getSimpleName(), filename);
        ClassLoader classLoader = testClass.getClassLoader();
        // Read via the resource stream rather than URL.getFile(): getFile() returns a
        // URL-encoded path (e.g. spaces as %20) and fails when resources live in a jar.
        try (InputStream input = Objects.requireNonNull(
                classLoader.getResourceAsStream(path), "Resource not found: " + path)) {
            return new ObjectMapper().readTree(input);
        }
    }
}
| 4,125 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/Output.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
/**
 * Captures JSON written through a Jackson generator so tests can inspect the
 * result as a tree. All accessors re-parse the buffered text on each call.
 */
public class Output {
    /** Pretty-prints a JSON tree to a String. */
    public static String format(JsonNode json) throws JsonProcessingException {
        return new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(json);
    }
    private final StringWriter writer = new StringWriter();
    private final JsonGenerator generator;
    public Output() throws IOException {
        JsonGenerator prettyGenerator = new JsonFactory().createGenerator(writer);
        prettyGenerator.setPrettyPrinter(new DefaultPrettyPrinter());
        this.generator = prettyGenerator;
    }
    /** Generator the code under test writes into. */
    public JsonGenerator generator(){
        return generator;
    }
    /** Everything written so far, as a JSON tree. */
    public JsonNode allOutput() throws JsonProcessingException {
        return parseBuffer();
    }
    /** The "graph" element of the written output (missing node if absent). */
    public JsonNode graph() throws JsonProcessingException {
        return parseBuffer().path("graph");
    }
    /** The "warnings" array of the written output. */
    public ArrayNode warnings() throws JsonProcessingException {
        return (ArrayNode) parseBuffer().path("warnings");
    }
    // Re-parses the accumulated writer contents into a tree.
    private JsonNode parseBuffer() throws JsonProcessingException {
        return new ObjectMapper().readTree(writer.toString());
    }
}
| 4,126 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1FeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class PropertyGraphTrainingDataConfigWriterV1FeatureTest {
@Test
public void shouldWriteNewObjectForEach() throws IOException {
    // Two node files and two edge files should yield four config entries,
    // nodes first, each prefixed with its nodes/ or edges/ directory.
    GraphSchema graphSchema = new GraphSchema();
    graphSchema.graphElementSchemasFor(GraphElementType.nodes)
            .addLabelSchema(new LabelSchema(new Label("Person")), Arrays.asList("person-1.csv", "person-2.csv"));
    graphSchema.graphElementSchemasFor(GraphElementType.edges)
            .addLabelSchema(new LabelSchema(new Label("follows")), Arrays.asList("follows-1.csv", "follows-2.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    ArrayNode entries = (ArrayNode) output.graph();
    Assert.assertEquals(4, entries.size());
    Assert.assertEquals("nodes/person-1.csv", entries.get(0).path("file_name").textValue());
    Assert.assertEquals("nodes/person-2.csv", entries.get(1).path("file_name").textValue());
    Assert.assertEquals("edges/follows-1.csv", entries.get(2).path("file_name").textValue());
    Assert.assertEquals("edges/follows-2.csv", entries.get(3).path("file_name").textValue());
}
@Test
public void everyObjectShouldHaveACommaSeparator() throws IOException {
    // Both node and edge entries must declare "," as their column separator.
    GraphSchema graphSchema = new GraphSchema();
    graphSchema.graphElementSchemasFor(GraphElementType.nodes)
            .addLabelSchema(new LabelSchema(new Label("Person")), Collections.singletonList("person-1.csv"));
    graphSchema.graphElementSchemasFor(GraphElementType.edges)
            .addLabelSchema(new LabelSchema(new Label("follows")), Collections.singletonList("follows-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    ArrayNode entries = (ArrayNode) output.graph();
    Assert.assertEquals(2, entries.size());
    Assert.assertEquals(",", entries.get(0).path("separator").textValue());
    Assert.assertEquals(",", entries.get(1).path("separator").textValue());
}
@Test
public void edgesShouldIncludeEdgeSpec() throws IOException {
    // An edge label with multiple from/to vertex labels should emit a single
    // "edge" spec whose edge_type triple is [from-labels, edge-label, to-labels],
    // with multiple labels joined by ';'.
    GraphSchema graphSchema = new GraphSchema();
    GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
    edgeSchemas.addLabelSchema(new LabelSchema(
            new Label(
                    "follows",
                    Arrays.asList("Person", "Admin"),
                    Arrays.asList("Person", "Temp"))),
            Collections.singletonList("follows-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    JsonNode graph = output.graph();
    Assert.assertEquals(1, graph.size());
    ArrayNode array = (ArrayNode) graph;
    ArrayNode edges = (ArrayNode) array.get(0).path("edges");
    Assert.assertEquals(1, edges.size());
    JsonNode edge = edges.get(0);
    Assert.assertEquals("edge", edge.path("edge_spec_type").textValue());
    // Edge rows are keyed by the ~from and ~to columns.
    ArrayNode cols = (ArrayNode) edge.path("cols");
    Assert.assertEquals("~from", cols.get(0).textValue());
    Assert.assertEquals("~to", cols.get(1).textValue());
    // Input order was (Person, Admin) but the output is "Admin;Person" —
    // the labels appear to be sorted; "Person;Temp" is consistent with that.
    ArrayNode edgeType = (ArrayNode) edge.path("edge_type");
    Assert.assertEquals("Admin;Person", edgeType.get(0).textValue());
    Assert.assertEquals("follows", edgeType.get(1).textValue());
    Assert.assertEquals("Person;Temp", edgeType.get(2).textValue());
}
@Test
public void singleValueFloatFeatureForVertex() throws IOException {
    // A single-value, non-nullable Float property on a multi-label vertex should
    // be written as a numerical node feature with min-max normalization.
    DataType dataType = DataType.Float;
    boolean isNullable = false;
    boolean isMultiValue = false;
    GraphSchema graphSchema = new GraphSchema();
    GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema labelSchema = new LabelSchema(new Label(Arrays.asList("Person", "Admin")));
    labelSchema.put("rating", new PropertySchema("rating", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    JsonNode graph = output.graph();
    Assert.assertEquals(1, graph.size());
    ArrayNode array = (ArrayNode) graph;
    ArrayNode features = (ArrayNode) array.get(0).path("features");
    Assert.assertEquals(1, features.size());
    JsonNode feature = features.get(0);
    Assert.assertEquals("node", feature.path("feat_type").textValue());
    Assert.assertEquals("numerical", feature.path("sub_feat_type").textValue());
    // Multi-label vertices render as a single ';'-joined node_type; input order
    // (Person, Admin) comes out as "Admin;Person", which appears sorted.
    Assert.assertEquals("Admin;Person", feature.path("node_type").textValue());
    Assert.assertEquals("min-max", feature.path("norm").textValue());
    // Feature columns: vertex id followed by the property column.
    ArrayNode cols = (ArrayNode) feature.path("cols");
    Assert.assertEquals(2, cols.size());
    Assert.assertEquals("~id", cols.get(0).textValue());
    Assert.assertEquals("rating", cols.get(1).textValue());
    // Single-value features carry no separator.
    Assert.assertTrue(feature.path("separator").isMissingNode());
}
@Test
public void shouldNotIncludeFeatureForMultiValueFloatFeatureForVertex() throws IOException {
    // A multi-value Float property should produce no feature at all.
    GraphSchema graphSchema = new GraphSchema();
    LabelSchema movieSchema = new LabelSchema(new Label(Collections.singletonList("Movie")));
    movieSchema.put("encoding", new PropertySchema("encoding", false, DataType.Float, true, EnumSet.noneOf(DataType.class)));
    graphSchema.graphElementSchemasFor(GraphElementType.nodes)
            .addLabelSchema(movieSchema, Collections.singletonList("movie-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    ArrayNode entries = (ArrayNode) output.graph();
    Assert.assertEquals(1, entries.size());
    ArrayNode features = (ArrayNode) entries.get(0).path("features");
    Assert.assertEquals(0, features.size());
}
@Test
public void intFeatureForVertex() throws IOException {
    // A single-value, non-nullable Integer property should also be written as a
    // numerical node feature with min-max normalization, like Float.
    DataType dataType = DataType.Integer;
    boolean isNullable = false;
    boolean isMultiValue = false;
    GraphSchema graphSchema = new GraphSchema();
    GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema labelSchema = new LabelSchema(new Label(Arrays.asList("Person", "Admin")));
    labelSchema.put("age", new PropertySchema("age", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    JsonNode graph = output.graph();
    Assert.assertEquals(1, graph.size());
    ArrayNode array = (ArrayNode) graph;
    ArrayNode features = (ArrayNode) array.get(0).path("features");
    Assert.assertEquals(1, features.size());
    JsonNode feature = features.get(0);
    Assert.assertEquals("node", feature.path("feat_type").textValue());
    Assert.assertEquals("numerical", feature.path("sub_feat_type").textValue());
    Assert.assertEquals("min-max", feature.path("norm").textValue());
    // Multi-label vertex rendered as ';'-joined node_type.
    Assert.assertEquals("Admin;Person", feature.path("node_type").textValue());
    // Feature columns: vertex id followed by the property column.
    ArrayNode cols = (ArrayNode) feature.path("cols");
    Assert.assertEquals(2, cols.size());
    Assert.assertEquals("~id", cols.get(0).textValue());
    Assert.assertEquals("age", cols.get(1).textValue());
    // Single-value features carry no separator.
    Assert.assertTrue(feature.path("separator").isMissingNode());
}
@Test
public void singleValueStringFeatureForVertex() throws IOException {
    // A single-value String property should be written as a "category" node
    // feature — no normalization and no separator.
    DataType dataType = DataType.String;
    boolean isNullable = false;
    boolean isMultiValue = false;
    GraphSchema graphSchema = new GraphSchema();
    GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema labelSchema = new LabelSchema(new Label(Collections.singletonList("Movie")));
    labelSchema.put("class", new PropertySchema("class", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    JsonNode graph = output.graph();
    Assert.assertEquals(1, graph.size());
    ArrayNode array = (ArrayNode) graph;
    ArrayNode features = (ArrayNode) array.get(0).path("features");
    Assert.assertEquals(1, features.size());
    JsonNode feature = features.get(0);
    Assert.assertEquals("node", feature.path("feat_type").textValue());
    Assert.assertEquals("category", feature.path("sub_feat_type").textValue());
    Assert.assertEquals("Movie", feature.path("node_type").textValue());
    // Feature columns: vertex id followed by the property column.
    ArrayNode cols = (ArrayNode) feature.path("cols");
    Assert.assertEquals(2, cols.size());
    Assert.assertEquals("~id", cols.get(0).textValue());
    Assert.assertEquals("class", cols.get(1).textValue());
    // Category features have neither a norm nor (for single values) a separator.
    Assert.assertTrue(feature.path("norm").isMissingNode());
    Assert.assertTrue(feature.path("separator").isMissingNode());
}
@Test
public void multiValueStringFeatureForVertex() throws IOException {
    // A multi-value String property is still a "category" feature, but gains a
    // ';' separator so multiple values can share one column.
    DataType dataType = DataType.String;
    boolean isNullable = false;
    boolean isMultiValue = true;
    GraphSchema graphSchema = new GraphSchema();
    GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema labelSchema = new LabelSchema(new Label(Collections.singletonList("Movie")));
    labelSchema.put("movieType", new PropertySchema("movieType", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
    JsonNode graph = output.graph();
    Assert.assertEquals(1, graph.size());
    ArrayNode array = (ArrayNode) graph;
    ArrayNode features = (ArrayNode) array.get(0).path("features");
    Assert.assertEquals(1, features.size());
    JsonNode feature = features.get(0);
    Assert.assertEquals("node", feature.path("feat_type").textValue());
    Assert.assertEquals("category", feature.path("sub_feat_type").textValue());
    Assert.assertEquals("Movie", feature.path("node_type").textValue());
    // Multi-value features declare the ';' value separator.
    Assert.assertEquals(";", feature.path("separator").textValue());
    // Feature columns: vertex id followed by the property column.
    ArrayNode cols = (ArrayNode) feature.path("cols");
    Assert.assertEquals(2, cols.size());
    Assert.assertEquals("~id", cols.get(0).textValue());
    Assert.assertEquals("movieType", cols.get(1).textValue());
    // Category features carry no normalization.
    Assert.assertTrue(feature.path("norm").isMissingNode());
}
@Test
public void shouldAddWord2VecFeatureIfSpecifiedInConfig() throws IOException {
    // Overriding a String property with a word2vec feature should emit the
    // configured language models and omit "norm" and "separator".
    Label movieLabel = new Label(Collections.singletonList("Movie"));

    GraphSchema schema = new GraphSchema();
    GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema movieSchema = new LabelSchema(movieLabel);
    movieSchema.put("genre",
            new PropertySchema("genre", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(movieSchema, Collections.singletonList("movie-1.csv"));

    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(
            schema,
            output.generator(),
            PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
            PrinterOptions.NULL_OPTIONS,
            PropertyGraphTrainingDataConfigBuilderV1.builder()
                    .withWord2VecNodeFeature(
                            movieLabel,
                            "genre",
                            "en_core_web_lg", "fr_core_news_lg")
                    .build())
            .write();

    ArrayNode graphArray = (ArrayNode) output.graph();
    Assert.assertEquals(1, graphArray.size());
    ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
    Assert.assertEquals(1, featureArray.size());

    JsonNode featureNode = featureArray.get(0);
    Assert.assertEquals("node", featureNode.path("feat_type").textValue());
    Assert.assertEquals("word2vec", featureNode.path("sub_feat_type").textValue());
    Assert.assertEquals("Movie", featureNode.path("node_type").textValue());

    ArrayNode columns = (ArrayNode) featureNode.path("cols");
    Assert.assertEquals(2, columns.size());
    Assert.assertEquals("~id", columns.get(0).textValue());
    Assert.assertEquals("genre", columns.get(1).textValue());

    ArrayNode languages = (ArrayNode) featureNode.path("language");
    Assert.assertEquals(2, languages.size());
    Assert.assertEquals("en_core_web_lg", languages.get(0).textValue());
    Assert.assertEquals("fr_core_news_lg", languages.get(1).textValue());

    Assert.assertTrue(featureNode.path("norm").isMissingNode());
    Assert.assertTrue(featureNode.path("separator").isMissingNode());
}
@Test
public void shouldNumericalBucketFeatureIfSpecifiedInConfig() throws IOException {
    // A numerical bucket override should emit bucket count, sliding window
    // size and the [low, high] range, with no "norm" or "separator".
    Label movieLabel = new Label(Collections.singletonList("Movie"));

    GraphSchema schema = new GraphSchema();
    GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema movieSchema = new LabelSchema(movieLabel);
    movieSchema.put("score",
            new PropertySchema("score", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(movieSchema, Collections.singletonList("movie-1.csv"));

    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(
            schema,
            output.generator(),
            PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
            PrinterOptions.NULL_OPTIONS,
            PropertyGraphTrainingDataConfigBuilderV1.builder()
                    .withNumericalBucketFeature(movieLabel, "score", new Range(1, 100), 10, 2)
                    .build())
            .write();

    ArrayNode graphArray = (ArrayNode) output.graph();
    Assert.assertEquals(1, graphArray.size());
    ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
    Assert.assertEquals(1, featureArray.size());

    JsonNode featureNode = featureArray.get(0);
    Assert.assertEquals("node", featureNode.path("feat_type").textValue());
    Assert.assertEquals("bucket_numerical", featureNode.path("sub_feat_type").textValue());
    Assert.assertEquals("Movie", featureNode.path("node_type").textValue());
    Assert.assertEquals(10, featureNode.path("bucket_cnt").intValue());
    Assert.assertEquals(2, featureNode.path("slide_window_size").intValue());

    ArrayNode columns = (ArrayNode) featureNode.path("cols");
    Assert.assertEquals(2, columns.size());
    Assert.assertEquals("~id", columns.get(0).textValue());
    Assert.assertEquals("score", columns.get(1).textValue());

    ArrayNode rangeArray = (ArrayNode) featureNode.path("range");
    Assert.assertEquals(2, rangeArray.size());
    Assert.assertEquals(1, rangeArray.get(0).intValue());
    Assert.assertEquals(100, rangeArray.get(1).intValue());

    Assert.assertTrue(featureNode.path("norm").isMissingNode());
    Assert.assertTrue(featureNode.path("separator").isMissingNode());
}
@Test
public void shouldAddNumericalBucketFeatureForAllNumberTypes() throws IOException {
    // The bucket feature should be produced for every numeric data type,
    // not just Integer.
    Collection<DataType> numericTypes = Arrays.asList(
            DataType.Byte, DataType.Integer, DataType.Double, DataType.Float, DataType.Long, DataType.Short);

    for (DataType numericType : numericTypes) {
        Label movieLabel = new Label(Collections.singletonList("Movie"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
        LabelSchema movieSchema = new LabelSchema(movieLabel);
        movieSchema.put("score",
                new PropertySchema("score", false, numericType, false, EnumSet.noneOf(DataType.class)));
        nodeSchemas.addLabelSchema(movieSchema, Collections.singletonList("movie-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withNumericalBucketFeature(movieLabel, "score", new Range(1, 100), 10, 2)
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
        Assert.assertEquals(1, featureArray.size());

        JsonNode featureNode = featureArray.get(0);
        Assert.assertEquals("node", featureNode.path("feat_type").textValue());
        Assert.assertEquals("bucket_numerical", featureNode.path("sub_feat_type").textValue());
    }
}
@Test
public void shouldAddWarningIfAttemptingToCreateNumericalBucketFeatureForMultiValueDataType() throws IOException {
    // Bucket features are not supported on multi-value properties: the writer
    // should skip the feature and record a warning instead.
    Label movieLabel = new Label(Collections.singletonList("Movie"));

    GraphSchema schema = new GraphSchema();
    GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
    LabelSchema movieSchema = new LabelSchema(movieLabel);
    movieSchema.put("score",
            new PropertySchema("score", false, DataType.Integer, true, EnumSet.noneOf(DataType.class)));
    nodeSchemas.addLabelSchema(movieSchema, Collections.singletonList("movie-1.csv"));

    Output output = new Output();
    new PropertyGraphTrainingDataConfigWriterV1(
            schema,
            output.generator(),
            PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS,
            PropertyGraphTrainingDataConfigBuilderV1.builder()
                    .withNumericalBucketFeature(movieLabel, "score", new Range(1, 100), 10, 2)
                    .build())
            .write();

    ArrayNode graphArray = (ArrayNode) output.graph();
    ArrayNode warnings = output.warnings();

    Assert.assertEquals(1, graphArray.size());
    Assert.assertTrue(graphArray.get(0).path("labels").isMissingNode());
    ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
    Assert.assertEquals(0, featureArray.size());

    Assert.assertEquals(1, warnings.size());
    Assert.assertEquals(
            "Unable to add numerical bucket feature: Property 'score' of node type 'Movie' is a multi-value property.",
            warnings.get(0).textValue());
}
@Test
public void singleValueNumericFeatureForEdge() throws IOException {
    // A single-value numeric edge property should become a min-max normalised
    // "numerical" edge feature, for every numeric data type.
    Collection<DataType> numericTypes = Arrays.asList(
            DataType.Byte, DataType.Integer, DataType.Double, DataType.Float, DataType.Long, DataType.Short);

    for (DataType numericType : numericTypes) {
        GraphSchema schema = new GraphSchema();
        GraphElementSchemas edgeSchemas = schema.graphElementSchemasFor(GraphElementType.edges);
        LabelSchema knowsSchema = new LabelSchema(
                new Label("knows", Collections.singletonList("Person"), Collections.singletonList("Person")));
        knowsSchema.put("strength",
                new PropertySchema("strength", false, numericType, false, EnumSet.noneOf(DataType.class)));
        edgeSchemas.addLabelSchema(knowsSchema, Collections.singletonList("knows-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS).write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
        Assert.assertEquals(1, featureArray.size());

        JsonNode featureNode = featureArray.get(0);
        Assert.assertEquals("edge", featureNode.path("feat_type").textValue());
        Assert.assertEquals("numerical", featureNode.path("sub_feat_type").textValue());
        Assert.assertEquals("min-max", featureNode.path("norm").textValue());

        ArrayNode columns = (ArrayNode) featureNode.path("cols");
        Assert.assertEquals(3, columns.size());
        Assert.assertEquals("~from", columns.get(0).textValue());
        Assert.assertEquals("~to", columns.get(1).textValue());
        Assert.assertEquals("strength", columns.get(2).textValue());

        // edge_type is the (from label, edge label, to label) triple.
        ArrayNode edgeType = (ArrayNode) featureNode.path("edge_type");
        Assert.assertEquals(3, edgeType.size());
        Assert.assertEquals("Person", edgeType.get(0).textValue());
        Assert.assertEquals("knows", edgeType.get(1).textValue());
        Assert.assertEquals("Person", edgeType.get(2).textValue());

        Assert.assertTrue(featureNode.path("separator").isMissingNode());
    }
}
}
| 4,127 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1LabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import java.util.EnumSet;
/**
 * Tests for node and edge class labels emitted by {@link PropertyGraphTrainingDataConfigWriterV1}.
 */
public class PropertyGraphTrainingDataConfigWriterV1LabelTest {

    @Test
    public void shouldAddNodeClassLabelIfSpecifiedInConfig() throws IOException {
        // Configuring a node class label should emit a "node_class_label" entry
        // with cols, default split rates and the node type.
        Label personLabel = new Label(Collections.singletonList("Person"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
        LabelSchema personSchema = new LabelSchema(personLabel);
        personSchema.put("role",
                new PropertySchema("role", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
        nodeSchemas.addLabelSchema(personSchema, Collections.singletonList("person-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withNodeClassLabel(personLabel, "role")
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode labelArray = (ArrayNode) graphArray.get(0).path("labels");
        Assert.assertEquals(1, labelArray.size());

        JsonNode labelNode = labelArray.get(0);
        Assert.assertEquals("node", labelNode.path("label_type").textValue());
        Assert.assertEquals("node_class_label", labelNode.path("sub_label_type").textValue());

        ArrayNode columns = (ArrayNode) labelNode.path("cols");
        Assert.assertEquals(2, columns.size());
        Assert.assertEquals("~id", columns.get(0).textValue());
        Assert.assertEquals("role", columns.get(1).textValue());

        // Default train/validation/test split.
        ArrayNode splitRate = (ArrayNode) labelNode.path("split_rate");
        Assert.assertEquals(3, splitRate.size());
        Assert.assertEquals(0.7, splitRate.get(0).doubleValue(), 0.0);
        Assert.assertEquals(0.1, splitRate.get(1).doubleValue(), 0.0);
        Assert.assertEquals(0.2, splitRate.get(2).doubleValue(), 0.0);

        Assert.assertEquals("Person", labelNode.path("node_type").textValue());
        Assert.assertTrue(labelNode.path("separator").isMissingNode());
    }

    @Test
    public void shouldAddWarningIfColumnDoesNotExistForNodeClassLabel() throws IOException {
        // Referencing a non-existent property should skip the label and warn.
        Label personLabel = new Label(Collections.singletonList("Person"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
        LabelSchema personSchema = new LabelSchema(personLabel);
        personSchema.put("role",
                new PropertySchema("role", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
        nodeSchemas.addLabelSchema(personSchema, Collections.singletonList("person-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withNodeClassLabel(personLabel, "does-not-exist")
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        ArrayNode warnings = output.warnings();

        Assert.assertEquals(1, graphArray.size());
        Assert.assertTrue(graphArray.get(0).path("labels").isMissingNode());
        Assert.assertEquals(1, warnings.size());
        Assert.assertEquals(
                "Unable to add node class label: Node of type 'Person' does not contain property 'does-not-exist'.",
                warnings.get(0).textValue());
    }

    @Test
    public void shouldAddSeparatorIfNodeClassLabelIsMultiValued() throws IOException {
        // Multi-value label properties need a separator so values can be split.
        Label personLabel = new Label(Collections.singletonList("Person"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
        LabelSchema personSchema = new LabelSchema(personLabel);
        personSchema.put("role",
                new PropertySchema("role", false, DataType.String, true, EnumSet.noneOf(DataType.class)));
        nodeSchemas.addLabelSchema(personSchema, Collections.singletonList("person-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withNodeClassLabel(personLabel, "role")
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode labelArray = (ArrayNode) graphArray.get(0).path("labels");
        Assert.assertEquals(1, labelArray.size());
        Assert.assertEquals(";", labelArray.get(0).path("separator").textValue());
    }

    @Test
    public void shouldAddEdgeClassLabelIfSpecifiedInConfig() throws IOException {
        // Configuring an edge class label should emit an "edge_class_label" entry
        // with ~from/~to/property cols, split rates and the edge type triple.
        Label knowsLabel = new Label("knows",
                Collections.singletonList("Person"),
                Collections.singletonList("Person"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas edgeSchemas = schema.graphElementSchemasFor(GraphElementType.edges);
        LabelSchema knowsSchema = new LabelSchema(knowsLabel);
        knowsSchema.put("contact",
                new PropertySchema("contact", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
        edgeSchemas.addLabelSchema(knowsSchema, Collections.singletonList("knows-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withEdgeClassLabel(knowsLabel, "contact")
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode labelArray = (ArrayNode) graphArray.get(0).path("labels");
        Assert.assertEquals(1, labelArray.size());

        JsonNode labelNode = labelArray.get(0);
        Assert.assertEquals("edge", labelNode.path("label_type").textValue());
        Assert.assertEquals("edge_class_label", labelNode.path("sub_label_type").textValue());

        ArrayNode columns = (ArrayNode) labelNode.path("cols");
        Assert.assertEquals(3, columns.size());
        Assert.assertEquals("~from", columns.get(0).textValue());
        Assert.assertEquals("~to", columns.get(1).textValue());
        Assert.assertEquals("contact", columns.get(2).textValue());

        ArrayNode splitRate = (ArrayNode) labelNode.path("split_rate");
        Assert.assertEquals(3, splitRate.size());
        Assert.assertEquals(0.7, splitRate.get(0).doubleValue(), 0.0);
        Assert.assertEquals(0.1, splitRate.get(1).doubleValue(), 0.0);
        Assert.assertEquals(0.2, splitRate.get(2).doubleValue(), 0.0);

        // edge_type is the (from label, edge label, to label) triple.
        ArrayNode edgeType = (ArrayNode) labelNode.path("edge_type");
        Assert.assertEquals("Person", edgeType.get(0).textValue());
        Assert.assertEquals("knows", edgeType.get(1).textValue());
        Assert.assertEquals("Person", edgeType.get(2).textValue());

        Assert.assertTrue(labelNode.path("separator").isMissingNode());
    }

    @Test
    public void shouldAddWarningIfColumnDoesNotExistForEdgeClassLabel() throws IOException {
        // Referencing a non-existent edge property should skip the label and warn.
        Label knowsLabel = new Label("knows",
                Collections.singletonList("Person"),
                Collections.singletonList("Person"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas edgeSchemas = schema.graphElementSchemasFor(GraphElementType.edges);
        LabelSchema knowsSchema = new LabelSchema(knowsLabel);
        knowsSchema.put("contact",
                new PropertySchema("contact", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
        edgeSchemas.addLabelSchema(knowsSchema, Collections.singletonList("knows-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withEdgeClassLabel(knowsLabel, "does-not-exist")
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        ArrayNode warnings = output.warnings();

        Assert.assertEquals(1, graphArray.size());
        Assert.assertTrue(graphArray.get(0).path("labels").isMissingNode());
        Assert.assertEquals(1, warnings.size());
        Assert.assertEquals(
                "Unable to add edge class label: Edge of type 'knows' does not contain property 'does-not-exist'.",
                warnings.get(0).textValue());
    }

    @Test
    public void shouldAddSeparatorIfEdgeClassLabelIsMultiValued() throws IOException {
        // Multi-value edge label properties also need a separator.
        Label knowsLabel = new Label("knows",
                Collections.singletonList("Person"),
                Collections.singletonList("Person"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas edgeSchemas = schema.graphElementSchemasFor(GraphElementType.edges);
        LabelSchema knowsSchema = new LabelSchema(knowsLabel);
        knowsSchema.put("contact",
                new PropertySchema("contact", false, DataType.String, true, EnumSet.noneOf(DataType.class)));
        edgeSchemas.addLabelSchema(knowsSchema, Collections.singletonList("knows-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withEdgeClassLabel(knowsLabel, "contact")
                        .build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode labelArray = (ArrayNode) graphArray.get(0).path("labels");
        Assert.assertEquals(1, labelArray.size());
        Assert.assertEquals(";", labelArray.get(0).path("separator").textValue());
    }
}
| 4,128 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/TrainingDataWriterConfigV1Test.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.NumericalBucketFeatureConfigV1;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
/**
 * Tests for {@link NumericalBucketFeatureConfigV1} range validation and
 * numeric type widening.
 */
public class TrainingDataWriterConfigV1Test {

    @Test
    public void shouldThrowExceptionIfLowOrHighAreNotNumeric() {
        // Each pair is a candidate (low, high) where at least one bound
        // is not a number, so construction must fail.
        Collection<Collection<?>> candidates = Arrays.asList(
                Arrays.asList(1, "one"),
                Arrays.asList("one", 1),
                Arrays.asList(true, 1),
                Arrays.asList(1, true)
        );

        for (Collection<?> candidate : candidates) {
            Object[] bounds = candidate.toArray();
            try {
                new NumericalBucketFeatureConfigV1(
                        new Label("my-label"),
                        "column", new Range(bounds[0], bounds[1]), 10, 2);
                Assert.fail("Expected IllegalArgumentException");
            } catch (IllegalArgumentException e) {
                Assert.assertEquals("Low and high values must be numeric", e.getMessage());
            }
        }
    }

    @Test
    public void shouldConvertLowOrHighToBroadestType() {
        // Mixing int and long should widen both bounds to Long.
        NumericalBucketFeatureConfigV1 intAndLong = new NumericalBucketFeatureConfigV1(
                new Label("my-label"),
                "column", new Range(1, 10L), 10, 2);
        Assert.assertEquals(Long.class, intAndLong.range().high().getClass());
        Assert.assertEquals(Long.class, intAndLong.range().low().getClass());

        // Mixing double and int should widen both bounds to Double.
        NumericalBucketFeatureConfigV1 doubleAndInt = new NumericalBucketFeatureConfigV1(
                new Label("my-label"),
                "column", new Range(0.1, 10), 10, 2);
        Assert.assertEquals(Double.class, doubleAndInt.range().high().getClass());
        Assert.assertEquals(Double.class, doubleAndInt.range().low().getClass());
    }
}
| 4,129 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1FeatureOverrideTests.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureOverrideConfigV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
/**
 * Tests for feature-type overrides applied by {@link PropertyGraphTrainingDataConfigWriterV1}.
 */
public class PropertyGraphTrainingDataConfigWriterV1FeatureOverrideTests {

    @Test
    public void shouldOverrideNumericalWithCategoricalFeature() throws IOException {
        // A Float property overridden as "category" should keep the configured
        // separator and drop the numerical "norm" entry.
        Label personAdminLabel = new Label(Arrays.asList("Person", "Admin"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
        LabelSchema personAdminSchema = new LabelSchema(personAdminLabel);
        personAdminSchema.put("rating",
                new PropertySchema("rating", false, DataType.Float, false, EnumSet.noneOf(DataType.class)));
        nodeSchemas.addLabelSchema(personAdminSchema, Collections.singletonList("person-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withNodeFeatureOverride(
                                new FeatureOverrideConfigV1(
                                        personAdminLabel,
                                        Collections.singletonList("rating"),
                                        FeatureTypeV1.category,
                                        null,
                                        new Separator(","))).build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
        Assert.assertEquals(1, featureArray.size());

        JsonNode featureNode = featureArray.get(0);
        Assert.assertEquals("node", featureNode.path("feat_type").textValue());
        Assert.assertEquals("category", featureNode.path("sub_feat_type").textValue());
        Assert.assertEquals("Admin;Person", featureNode.path("node_type").textValue());
        Assert.assertEquals(",", featureNode.path("separator").textValue());

        ArrayNode columns = (ArrayNode) featureNode.path("cols");
        Assert.assertEquals(2, columns.size());
        Assert.assertEquals("~id", columns.get(0).textValue());
        Assert.assertEquals("rating", columns.get(1).textValue());

        Assert.assertTrue(featureNode.path("norm").isMissingNode());
    }

    @Test
    public void shouldCreateMultiCategoricalFeature() throws IOException {
        // Overriding several properties at once should produce a single
        // categorical feature whose cols span all overridden properties.
        Label personAdminLabel = new Label(Arrays.asList("Person", "Admin"));

        GraphSchema schema = new GraphSchema();
        GraphElementSchemas nodeSchemas = schema.graphElementSchemasFor(GraphElementType.nodes);
        LabelSchema personAdminSchema = new LabelSchema(personAdminLabel);
        personAdminSchema.put("rating",
                new PropertySchema("rating", false, DataType.Float, false, EnumSet.noneOf(DataType.class)));
        personAdminSchema.put("job",
                new PropertySchema("job", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
        personAdminSchema.put("rank",
                new PropertySchema("rank", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
        nodeSchemas.addLabelSchema(personAdminSchema, Collections.singletonList("person-1.csv"));

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV1(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                PropertyGraphTrainingDataConfigBuilderV1.builder()
                        .withNodeFeatureOverride(
                                new FeatureOverrideConfigV1(
                                        personAdminLabel,
                                        Arrays.asList("job", "rank"),
                                        FeatureTypeV1.category,
                                        null,
                                        new Separator(","))).build())
                .write();

        ArrayNode graphArray = (ArrayNode) output.graph();
        Assert.assertEquals(1, graphArray.size());
        ArrayNode featureArray = (ArrayNode) graphArray.get(0).path("features");
        Assert.assertEquals(2, featureArray.size());

        // The second feature is the combined categorical override.
        JsonNode featureNode = featureArray.get(1);
        Assert.assertEquals("node", featureNode.path("feat_type").textValue());
        Assert.assertEquals("category", featureNode.path("sub_feat_type").textValue());
        Assert.assertEquals("Admin;Person", featureNode.path("node_type").textValue());

        ArrayNode columns = (ArrayNode) featureNode.path("cols");
        Assert.assertEquals(3, columns.size());
        Assert.assertEquals("~id", columns.get(0).textValue());
        Assert.assertEquals("job", columns.get(1).textValue());
        Assert.assertEquals("rank", columns.get(2).textValue());

        Assert.assertTrue(featureNode.path("norm").isMissingNode());
        Assert.assertTrue(featureNode.path("separator").isMissingNode());
    }
}
| 4,130 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseBucketCountV1Test.java
|
package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * Tests for {@link ParseBucketCountV1} error handling.
 */
public class ParseBucketCountV1Test {

    @Test
    public void throwsErrorIfInvalidBucketCount() {
        // A non-integer 'num_buckets' value must be rejected with a
        // descriptive parse error.
        ObjectNode config = JsonNodeFactory.instance.objectNode();
        config.put("num_buckets", "one");

        try {
            new ParseBucketCountV1(config, new ParsingContext("context")).parseBucketCount();
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals("Error parsing 'num_buckets' field for context. Expected an integer.", e.getMessage());
        }
    }
}
| 4,131 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/NoneFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
/**
 * Tests that the "none" feature encoding is honoured by
 * {@link PropertyGraphTrainingDataConfigWriterV2}.
 */
public class NoneFeatureTest {

    @Test
    public void shouldAllowSpecifyingNoneFeatureType() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void settingFeatureEncodingToNoneResultsInNoneFeatureTypeForAllFeatures() throws IOException {
        runTest("t2.json");
    }

    // Loads a fixture containing a schema, a feature override ("feature") and
    // the expected training data config ("config"), then checks the writer's
    // output against the expectation.
    private void runTest(String jsonFile) throws IOException {
        JsonNode fixture = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(fixture.get("schema"));
        Collection<TrainingDataWriterConfigV2> writerConfigs =
                TrainingDataWriterConfigV2.fromJson(fixture.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);

        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                writerConfigs.iterator().next()).write();

        JsonNode expected = fixture.get("config");
        assertEquals(Output.format(expected), Output.format(output.allOutput()));
    }
}
| 4,132 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/EdgeLabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class EdgeLabelTest {

    @Test
    public void shouldCreateLabelForPropertyWithSpecifiedConfigValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldSupplyDefaultSplitRateIfSplitRateNotSpecified() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void shouldUseTopLevelDefaultSplitRateIfSpecified() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldAllowEmptyPropertyForLinkPrediction() throws IOException {
        runTest("t4.json");
    }

    @Test
    public void shouldAllowMissingPropertyForLinkPrediction() throws IOException {
        runTest("t5.json");
    }

    @Test
    public void shouldThrowExceptionIfEmptyPropertyAndNotLinkPrediction() throws IOException {
        runTestExpectingError("t6.json",
                "Missing or empty 'property' field for edge regression specification (Label: [Person, knows, Person]).");
    }

    @Test
    public void shouldThrowExceptionIfMissingPropertyAndNotLinkPrediction() throws IOException {
        runTestExpectingError("t7.json",
                "Missing or empty 'property' field for edge regression specification (Label: [Person, knows, Person]).");
    }

    @Test
    public void shouldThrowExceptionIfUnrecognisedLabelType() throws IOException {
        runTestExpectingError("t8.json",
                "Invalid 'type' value for edge label (Label: [Person, knows, Person]): 'invalid'. Valid values are: 'classification', 'regression', 'link_prediction'.");
    }

    @Test
    public void shouldAllowComplexToAndFromLabels() throws IOException {
        runTest("t9.json");
    }

    /**
     * Runs the named fixture and asserts that writing the training data config
     * fails with an IllegalArgumentException carrying the expected message.
     */
    private void runTestExpectingError(String jsonFile, String expectedMessage) throws IOException {
        try {
            runTest(jsonFile);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals(expectedMessage, e.getMessage());
        }
    }

    /**
     * Loads the named JSON fixture (graph schema, label override config, and
     * expected training data config), writes the training data config, and
     * asserts the writer output matches the fixture's expected config.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode testCase = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(testCase.get("schema"));
        Collection<TrainingDataWriterConfigV2> configs =
                TrainingDataWriterConfigV2.fromJson(testCase.get("label"), NeptuneMLSourceDataModel.PropertyGraph);

        Output writerOutput = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                writerOutput.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                configs.iterator().next()).write();

        JsonNode expected = testCase.get("config");
        assertEquals(Output.format(expected), Output.format(writerOutput.allOutput()));
    }
}
| 4,133 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/BucketNumericalFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class BucketNumericalFeatureTest {

    @Test
    public void shouldCreateBucketNumericalFeatureConfigWithSuppliedValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldThrowErrorIfSeparatorSuppliedForBucketNumericalFeature() throws IOException {
        runTestExpectingError("t2.json",
                "Invalid 'separator' field for bucket_numerical feature. Bucket numerical feature property cannot contain multiple values.");
    }

    @Test
    public void shouldCreateAutoInferredFeatureIfMultiValueProperty() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldThrowErrorIfRangeIsMissing() throws IOException {
        runTestExpectingError("t4.json",
                "Error parsing 'range' field for bucket_numerical feature (Label: Person, Property: age). Expected an array with 2 numeric values.");
    }

    @Test
    public void shouldThrowErrorIfRangeIsSpecifiedIncorrectly() throws IOException {
        runTestExpectingError("t8.json",
                "Error parsing 'range' field for bucket_numerical feature (Label: Person, Property: age). Expected an array with 2 numeric values.");
    }

    @Test
    public void shouldThrowErrorIfBucketCountIsMissing() throws IOException {
        runTestExpectingError("t5.json",
                "Error parsing 'bucket_cnt' field for bucket_numerical feature (Label: Person, Property: age). Expected an integer.");
    }

    @Test
    public void shouldThrowErrorIfBucketCountIsSpecifiedIncorrectly() throws IOException {
        runTestExpectingError("t9.json",
                "Error parsing 'bucket_cnt' field for bucket_numerical feature (Label: Person, Property: age). Expected an integer.");
    }

    @Test
    public void shouldSupplyDefaultSlideWindowSizeIfSlideWindowSizeIsMissing() throws IOException {
        runTest("t6.json");
    }

    @Test
    public void shouldOmitImputerIfImputerIsMissing() throws IOException {
        runTest("t7.json");
    }

    @Test
    public void shouldSupportOldNumBucketsField() throws IOException {
        runTest("t10.json");
    }

    /**
     * Runs the named fixture and asserts that writing the training data config
     * fails with an IllegalArgumentException carrying the expected message.
     */
    private void runTestExpectingError(String jsonFile, String expectedMessage) throws IOException {
        try {
            runTest(jsonFile);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals(expectedMessage, e.getMessage());
        }
    }

    /**
     * Loads the named JSON fixture (graph schema, feature override config, and
     * expected training data config), writes the training data config, and
     * asserts the writer output matches the fixture's expected config.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode testCase = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(testCase.get("schema"));
        Collection<TrainingDataWriterConfigV2> configs =
                TrainingDataWriterConfigV2.fromJson(testCase.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);

        Output writerOutput = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                writerOutput.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                configs.iterator().next()).write();

        JsonNode expected = testCase.get("config");
        assertEquals(Output.format(expected), Output.format(writerOutput.allOutput()));
    }
}
| 4,134 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/DatetimeFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class DatetimeFeatureTest {

    @Test
    public void shouldCreateDatetimeFeatureConfigWithSuppliedValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldCreateDatetimeFeatureConfigWithFewerDatetimePartsValues() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void shouldCreateDatetimeFeatureConfigForAllDatetimePartsIfDatetimePartsIsMissing() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldCreateDatetimeFeatureConfigForAllDatetimePartsIfDatetimePartsIsEmpty() throws IOException {
        runTest("t4.json");
    }

    @Test
    public void shouldThrowErrorIfInvalidDatetimePart() throws IOException {
        runTestExpectingError("t5.json",
                "Invalid 'datetime_parts' value for datetime feature (Label: Person, Property: created): 'invalid'. Valid values are: 'hour', 'weekday', 'month', 'year'.");
    }

    @Test
    public void shouldCreateAutoFeatureConfigForMultiValueDateProperty() throws IOException {
        runTest("t6.json");
    }

    @Test
    public void shouldAutoInferDatetimeFeatureForDateProperty() throws IOException {
        runTest("t7.json");
    }

    @Test
    public void shouldAutoInferAutoFeatureForMultiValueDateProperty() throws IOException {
        runTest("t8.json");
    }

    /**
     * Runs the named fixture and asserts that writing the training data config
     * fails with an IllegalArgumentException carrying the expected message.
     */
    private void runTestExpectingError(String jsonFile, String expectedMessage) throws IOException {
        try {
            runTest(jsonFile);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals(expectedMessage, e.getMessage());
        }
    }

    /**
     * Loads the named JSON fixture (graph schema, feature override config, and
     * expected training data config), writes the training data config, and
     * asserts the writer output matches the fixture's expected config.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode testCase = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(testCase.get("schema"));
        Collection<TrainingDataWriterConfigV2> configs =
                TrainingDataWriterConfigV2.fromJson(testCase.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);

        Output writerOutput = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                writerOutput.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                configs.iterator().next()).write();

        JsonNode expected = testCase.get("config");
        assertEquals(Output.format(expected), Output.format(writerOutput.allOutput()));
    }
}
| 4,135 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/PropertyGraphTrainingDataConfigWriterV2FeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.FeatureEncodingFlag;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
public class PropertyGraphTrainingDataConfigWriterV2FeatureTest {

    @Test
    public void shouldWriteVersionAndQueryEngine() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldAddNodeAndEdgeObjectForEachFile() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void shouldAddAutoFeatureForSingleValueStringProperty() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldAddAutoFeatureWithSeparatorForMultiValueStringProperty() throws IOException {
        runTest("t4.json");
    }

    @Test
    public void shouldAddNumericFeatureWithNormMinMaxAndMedianImputerForSingleValueIntegerProperty() throws IOException {
        runTest("t5.json");
    }

    @Test
    public void shouldAddAutoFeatureWithSeparatorAndMedianImputerForMultiValueIntegerProperty() throws IOException {
        runTest("t6.json");
    }

    @Test
    public void shouldAddNumericFeatureWithNormMinMaxAndMedianImputerForSingleValueFloatProperty() throws IOException {
        runTest("t7.json");
    }

    @Test
    public void shouldAddAutoFeatureWithSeparatorAndMedianImputerForMultiValueFloatProperty() throws IOException {
        runTest("t8.json");
    }

    @Test
    public void shouldAddDatetimeFeatureWithAllDatetimePartsForSingleValueDateProperty() throws IOException {
        runTest("t9.json");
    }

    @Test
    public void shouldAddAutoFeatureWithSeparatorForMultiValueDateProperty() throws IOException {
        runTest("t10.json");
    }

    /**
     * Loads the named JSON fixture (graph schema plus expected training data
     * config), writes the training data config with default (no override)
     * settings, and asserts the writer output matches the fixture.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode testCase = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(testCase.get("schema"));

        Output writerOutput = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                writerOutput.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS).write();

        JsonNode expected = testCase.get("config");
        assertEquals(Output.format(expected), Output.format(writerOutput.allOutput()));
    }
}
| 4,136 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/NumericalCategoryAndAutoFeatureOverrideTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
public class NumericalCategoryAndAutoFeatureOverrideTest {
@Test
public void shouldAllowNumericalOverrideAndSupplyDefaultConfigFieldValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldAllowNumericalOverrideAndSupplyDefaultConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t5.json");
}
@Test
public void shouldAllowNumericalOverrideAndUseSpecifiedConfigFieldValues() throws IOException {
runTest("t2.json");
}
@Test
public void shouldAllowNumericalOverrideAndUseSpecifiedConfigFieldValuesIncludingSeparatorForSingleValueProperty() throws IOException {
runTest("t8.json");
}
@Test
public void shouldAllowNumericalOverrideAndUseSpecifiedConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t6.json");
}
@Test
public void shouldAddWarningForOverrideForPropertyThatDoesNotExist() throws IOException {
runTest("t3.json");
}
@Test
public void shouldAllowCategoryOverrideAndSupplyDefaultConfigFieldValues() throws IOException {
runTest("t4.json");
}
@Test
public void shouldAllowCategoryOverrideAndSupplyDefaultConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t7.json");
}
@Test
public void shouldAllowCategoryOverrideAndUseSpecifiedConfigFieldValuesForSingleValueProperty() throws IOException {
runTest("t9.json");
}
@Test
public void shouldAllowCategoryOverrideAndUseSpecifiedConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t12.json");
}
@Test
public void shouldAllowAutoOverrideForNumericalFeatureWithSuppliedSeparatorAndImputerIgnoringAllOtherConfigValues() throws IOException {
runTest("t10.json");
}
@Test
public void shouldAllowAutoOverrideForNumericalFeatureWithoutImputerIfNotSupplied() throws IOException {
runTest("t11.json");
}
@Test
public void autoOverrideForIntegerMultiValuePropertyWithNoAdditionalConfigValuesShouldProduceAutoFeatureWithSeparatorButNoImputer() throws IOException {
runTest("t13.json");
}
@Test
public void autoOverrideForIntegerSingleValuePropertyWithNoAdditionalConfigValuesShouldProduceAutoFeatureNoOtherConfig() throws IOException {
runTest("t14.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 4,137 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextFastTextFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class TextFastTextFeatureTest {

    @Test
    public void shouldCreateFastTextFeatureConfigForNodeWithSuppliedValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldCreateFastTextFeatureConfigForEdgeWithSuppliedValues() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void shouldAddWarningForUnknownLanguage() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldRaiseErrorIfLanguageMissing() throws IOException {
        try {
            runTest("t4.json");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals("Error parsing 'language' field for text_fasttext feature. Expected one of the following values: 'en', 'zh', 'hi', 'es', 'fr'.", e.getMessage());
        }
    }

    // Fixed typo in method name: "MaxLangth" -> "MaxLength". Test methods are
    // discovered via the @Test annotation, so renaming is safe for callers.
    @Test
    public void shouldAllowMissingMaxLengthProperty() throws IOException {
        runTest("t5.json");
    }

    /**
     * Loads the named JSON fixture (graph schema, feature override config, and
     * expected training data config), writes the training data config, and
     * asserts the writer output matches the fixture's expected config.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode json = JsonFromResource.get(jsonFile, getClass());
        GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
        JsonNode expectedTrainingDataConfig = json.get("config");
        Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                graphSchema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                overrideConfig.iterator().next()).write();
        assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
    }
}
| 4,138 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextWord2VecFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
public class TextWord2VecFeatureTest {

    @Test
    public void shouldCreateWord2VecFeatureConfigWithSuppliedValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldUseDefaultLanguageIfLanguageIsMissing() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void shouldUseDefaultLanguageIfLanguageIsEmpty() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldAddWarningIfUnsupportedLanguageIsSupplied() throws IOException {
        runTest("t4.json");
    }

    @Test
    public void shouldSupportOldWord2VecFeatureName() throws IOException {
        runTest("t5.json");
    }

    @Test
    public void shouldCreateAutoFeatureConfigForMultiValueProperty() throws IOException {
        runTest("t6.json");
    }

    /**
     * Loads the named JSON fixture (graph schema, feature override config, and
     * expected training data config), writes the training data config, and
     * asserts the writer output matches the fixture's expected config.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode testCase = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(testCase.get("schema"));
        Collection<TrainingDataWriterConfigV2> configs =
                TrainingDataWriterConfigV2.fromJson(testCase.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);

        Output writerOutput = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                writerOutput.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                configs.iterator().next()).write();

        JsonNode expected = testCase.get("config");
        assertEquals(Output.format(expected), Output.format(writerOutput.allOutput()));
    }
}
| 4,139 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/NodeLabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class NodeLabelTest {

    @Test
    public void shouldCreateLabelForPropertyWithSpecifiedConfigValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldAddSeparatorForMultiValueProperty() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void ifSeparatorIsSpecifiedForMultiValuePropertyThisIsUsedInsteadOfDefaultSeparator() throws IOException {
        runTest("t8.json");
    }

    @Test
    public void shouldSupplyDefaultSplitRateIfSplitRateNotSpecified() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldUseTopLevelDefaultSplitRateIfSpecified() throws IOException {
        runTest("t4.json");
    }

    @Test
    public void shouldUseSpecificSplitRateInPreferenceToTopLevelDefaultSplitRate() throws IOException {
        runTest("t10.json");
    }

    @Test
    public void shouldAddWarningIfPropertyDoesNotExist() throws IOException {
        runTest("t5.json");
    }

    @Test
    public void shouldThrowExceptionIfUnrecognisedLabelType() throws IOException {
        runTestExpectingError("t6.json",
                "Invalid 'type' value for node label (Label: Person, Property: credit): 'unknown'. Valid values are: 'classification', 'regression'.");
    }

    @Test
    public void classificationLabelShouldTakePrecedenceOverFeatureOverride() throws IOException {
        runTest("t7.json");
    }

    @Test
    public void shouldThrowAnErrorIfNoPropertySpecified() throws IOException {
        runTestExpectingError("t9.json",
                "Error parsing 'property' field for node label (Label: Person). Expected a 'property' field with a string value.");
    }

    /**
     * Runs the named fixture and asserts that writing the training data config
     * fails with an IllegalArgumentException carrying the expected message.
     */
    private void runTestExpectingError(String jsonFile, String expectedMessage) throws IOException {
        try {
            runTest(jsonFile);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals(expectedMessage, e.getMessage());
        }
    }

    /**
     * Loads the named JSON fixture (graph schema, label override config, and
     * expected training data config), writes the training data config, and
     * asserts the writer output matches the fixture's expected config.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode testCase = JsonFromResource.get(jsonFile, getClass());

        GraphSchema schema = GraphSchema.fromJson(testCase.get("schema"));
        Collection<TrainingDataWriterConfigV2> configs =
                TrainingDataWriterConfigV2.fromJson(testCase.get("label"), NeptuneMLSourceDataModel.PropertyGraph);

        Output writerOutput = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                schema,
                writerOutput.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                configs.iterator().next()).write();

        JsonNode expected = testCase.get("config");
        assertEquals(Output.format(expected), Output.format(writerOutput.allOutput()));
    }
}
| 4,140 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextTfIdfTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static junit.framework.TestCase.fail;
import static org.junit.Assert.assertEquals;
/**
 * Tests for 'text_tfidf' feature overrides in the Neptune ML training-data
 * configuration: a well-formed override is written through to the training
 * data config, and malformed 'ngram_range', 'min_df' and 'max_features'
 * fields are rejected with descriptive errors.
 *
 * Each test reads a JSON resource containing a graph schema, a feature
 * override and (for the positive cases) the expected training data config.
 */
public class TextTfIdfTest {

    // Expected error messages for the malformed-field tests. All fixtures use
    // the same label/property (Label: Person, Property: bio).
    private static final String NGRAM_RANGE_ERROR =
            "Error parsing 'ngram_range' field for text_tfidf feature (Label: Person, Property: bio). Expected an array with 2 numeric values.";
    private static final String MIN_DF_ERROR =
            "Error parsing 'min_df' field for text_tfidf feature (Label: Person, Property: bio). Expected an integer value.";
    private static final String MAX_FEATURES_ERROR =
            "Error parsing 'max_features' field for text_tfidf feature (Label: Person, Property: bio). Expected an integer value.";

    @Test
    public void shouldCreateTfTidFeatureConfigWithSuppliedValues() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldThrowErrorIfNgramRangeIsMissing() throws IOException {
        runTestExpectingError("t2.json", NGRAM_RANGE_ERROR);
    }

    @Test
    public void shouldThrowErrorIfNgramRangeHasOnlyOneElement() throws IOException {
        runTestExpectingError("t3.json", NGRAM_RANGE_ERROR);
    }

    @Test
    public void shouldThrowErrorIfNgramRangeHasMoreThanTwoElements() throws IOException {
        runTestExpectingError("t4.json", NGRAM_RANGE_ERROR);
    }

    @Test
    public void shouldThrowErrorIfNgramRangeHasNonNumericElements() throws IOException {
        runTestExpectingError("t5.json", NGRAM_RANGE_ERROR);
    }

    @Test
    public void shouldThrowErrorIfMinDfIsMissing() throws IOException {
        runTestExpectingError("t6.json", MIN_DF_ERROR);
    }

    @Test
    public void shouldThrowErrorIfMinDfIsNonNumeric() throws IOException {
        runTestExpectingError("t7.json", MIN_DF_ERROR);
    }

    @Test
    public void shouldThrowErrorIfMaxFeaturesIsMissing() throws IOException {
        runTestExpectingError("t8.json", MAX_FEATURES_ERROR);
    }

    @Test
    public void shouldThrowErrorIfMaxFeaturesIsNonNumeric() throws IOException {
        runTestExpectingError("t9.json", MAX_FEATURES_ERROR);
    }

    @Test
    public void shouldCreateAutoFeatureConfigForMultiValueProperty() throws IOException {
        runTest("t10.json");
    }

    /**
     * Runs the training data config writer against the schema and feature
     * override in the named resource file, and asserts that the generated
     * config matches the resource's expected 'config' section.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode json = JsonFromResource.get(jsonFile, getClass());
        GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
        JsonNode expectedTrainingDataConfig = json.get("config");
        Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                graphSchema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                overrideConfig.iterator().next()).write();
        assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
    }

    /**
     * Expects {@link #runTest(String)} on the given resource to throw an
     * IllegalArgumentException carrying exactly the given message.
     */
    private void runTestExpectingError(String jsonFile, String expectedMessage) throws IOException {
        try {
            runTest(jsonFile);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals(expectedMessage, e.getMessage());
        }
    }
}
| 4,141 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextSbertFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * Tests for 'text_sbert' feature overrides in the Neptune ML training-data
 * configuration: the default, 128 and 512 SBert variants are accepted for
 * both node and edge features, and an unrecognized variant is rejected.
 */
public class TextSbertFeatureTest {

    @Test
    public void shouldCreateSbertTextFeatureConfigForNodeWithDefaultTextSBertType() throws IOException {
        runTest("t1.json");
    }

    @Test
    public void shouldCreateSbertTextFeatureConfigForNodeWithSBert128Type() throws IOException {
        runTest("t2.json");
    }

    @Test
    public void shouldCreateSbertTextFeatureConfigForNodeWithSBert512Type() throws IOException {
        runTest("t3.json");
    }

    @Test
    public void shouldThrowErrorForUnrecognizedSBertType() throws IOException {
        try {
            runTest("t4.json");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // Fixed: JUnit's assertEquals takes (expected, actual) — the original had
            // the arguments swapped, which produces a misleading failure message.
            assertEquals("Illegal feature type: 'text_sbertUnknown'. Supported values are: 'bucket_numerical', 'text_word2vec', 'text_fasttext', 'text_sbert', 'text_sbert128', 'text_sbert512', 'category', 'numerical', 'text_tfidf', 'datetime', 'auto', 'none'.", e.getMessage());
        }
    }

    @Test
    public void shouldCreateSbertTextFeatureConfigForEdgeWithDefaultTextSBertType() throws IOException {
        runTest("t5.json");
    }

    @Test
    public void shouldCreateSbertTextFeatureConfigForEdgeWithSBert128Type() throws IOException {
        runTest("t6.json");
    }

    @Test
    public void shouldCreateSbertTextFeatureConfigForEdgeWithSBert512Type() throws IOException {
        runTest("t7.json");
    }

    /**
     * Runs the training data config writer against the schema and feature
     * override in the named resource file, and asserts that the generated
     * config matches the resource's expected 'config' section.
     */
    private void runTest(String jsonFile) throws IOException {
        JsonNode json = JsonFromResource.get(jsonFile, getClass());
        GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
        JsonNode expectedTrainingDataConfig = json.get("config");
        Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
        Output output = new Output();
        new PropertyGraphTrainingDataConfigWriterV2(
                graphSchema,
                output.generator(),
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
                PrinterOptions.NULL_OPTIONS,
                overrideConfig.iterator().next()).write();
        assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
    }
}
| 4,142 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TrainingDataWriterConfigV2Test.java
|
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.*;
/**
 * Verifies that {@code TrainingDataWriterConfigV2.fromJson} produces one
 * writer configuration per training-job element found in the 'neptune_ml'
 * section of the profile JSON.
 */
public class TrainingDataWriterConfigV2Test {

    @Test
    public void shouldCreateSingleConfig() throws IOException {
        assertEquals(1, loadConfigs("t1.json").size());
    }

    @Test
    public void shouldConfigForEachElementInArray() throws IOException {
        assertEquals(3, loadConfigs("t2.json").size());
    }

    @Test
    public void shouldConfigForEachElementInJobsArray() throws IOException {
        assertEquals(5, loadConfigs("t3.json").size());
    }

    // Loads the named resource and parses its 'neptune_ml' section into writer configs.
    private Collection<TrainingDataWriterConfigV2> loadConfigs(String resourceName) throws IOException {
        JsonNode json = JsonFromResource.get(resourceName, getClass());
        return TrainingDataWriterConfigV2.fromJson(json.path("neptune_ml"), NeptuneMLSourceDataModel.PropertyGraph);
    }
}
| 4,143 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseTaskTypeV2Test.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Verifies that {@code ParseTaskTypeV2} rejects invalid and missing
 * 'task_type' values with descriptive error messages.
 */
public class ParseTaskTypeV2Test {

    // The task types the parser accepts, exactly as listed in its error messages.
    private static final String VALID_TASK_TYPES =
            "'link_prediction', 'node_classification', 'node_regression', 'edge_classification', 'edge_regression'";

    @Test
    public void throwsErrorIfInvalidTaskType() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("task_type", "invalid");
        assertEquals(
                "Invalid 'task_type' value for context: 'invalid'. Valid values are: " + VALID_TASK_TYPES + ".",
                parseExpectingFailure(json));
    }

    @Test
    public void throwsErrorIfMissingTaskType() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("not_a_task_type", "a_value");
        assertEquals(
                "Error parsing 'task_type' field for context. Expected one of the following values: " + VALID_TASK_TYPES + ".",
                parseExpectingFailure(json));
    }

    // Parses the supplied JSON and returns the IllegalArgumentException message;
    // fails the test if parsing unexpectedly succeeds.
    private String parseExpectingFailure(ObjectNode json) {
        try {
            new ParseTaskTypeV2(json, new ParsingContext("context")).parseTaskType();
            fail("Expected IllegalArgumentException");
            return null; // unreachable: fail() always throws
        } catch (IllegalArgumentException e) {
            return e.getMessage();
        }
    }
}
| 4,144 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseImputerTypeV2Test.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.ImputerTypeV2;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Verifies that {@code ParseImputerTypeV2} rejects invalid 'imputer' values
 * and falls back to {@code ImputerTypeV2.none} when the field is absent.
 */
public class ParseImputerTypeV2Test {

    @Test
    public void throwsErrorIfInvalidImputer() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("imputer", "invalid");
        try {
            new ParseImputerTypeV2(json, new ParsingContext("context")).parseImputerType();
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals("Invalid 'imputer' value for context: 'invalid'. Valid values are: 'mean', 'median', 'most-frequent'.", e.getMessage());
        }
    }

    @Test
    public void returnsNoneIfImputerMissing() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ImputerTypeV2 imputerType = new ParseImputerTypeV2(json, new ParsingContext("context")).parseImputerType();
        // Fixed: JUnit's assertEquals takes (expected, actual) — the original had
        // the arguments swapped, which produces a misleading failure message.
        assertEquals(ImputerTypeV2.none, imputerType);
    }
}
| 4,145 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseNormTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * Verifies that {@code ParseNorm} rejects an unrecognized 'norm' value with
 * a descriptive error naming the feature's label and property.
 */
public class ParseNormTest {

    @Test
    public void throwsErrorIfInvalidNorm() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("norm", "invalid");

        ParsingContext context = new ParsingContext("node feature")
                .withLabel(new Label("Person"))
                .withProperty("age");

        try {
            new ParseNorm(json, context).parseNorm();
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals(
                    "Invalid 'norm' value for node feature (Label: Person, Property: age): 'invalid'. Valid values are: 'none', 'min-max', 'standard'.",
                    e.getMessage());
        }
    }
}
| 4,146 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseNodeTypeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Verifies that {@code ParseNodeType} parses the 'node' field of a training
 * config element — a single label, an array of labels, or a
 * semicolon-separated label string — and rejects missing or non-text values.
 */
public class ParseNodeTypeTest {

    @Test
    public void canParseSimpleLabel() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("node", "Person");
        assertEquals("Person", parse(json).fullyQualifiedLabel());
    }

    @Test
    public void canParseMultiLabel() {
        // Multiple labels supplied as a JSON array are sorted and joined with ';'.
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode labels = JsonNodeFactory.instance.arrayNode();
        labels.add("Person");
        labels.add("Admin");
        json.set("node", labels);
        assertEquals("Admin;Person", parse(json).fullyQualifiedLabel());
    }

    @Test
    public void canParseSemicolonSeparatedMultiLabel() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("node", "Person;Admin");
        assertEquals("Admin;Person", parse(json).fullyQualifiedLabel());
    }

    @Test
    public void throwsErrorIfNodeFieldIsMissing() {
        assertParseFails(JsonNodeFactory.instance.objectNode());
    }

    @Test
    public void throwsErrorIfNodeFieldIsNotText() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        json.put("node", 1);
        assertParseFails(json);
    }

    // Parses the 'node' field of the supplied JSON into a Label.
    private Label parse(ObjectNode json) {
        return new ParseNodeType(json, new ParsingContext("node")).parseNodeType();
    }

    // Expects parsing to fail with the standard 'node' field error message.
    private void assertParseFails(ObjectNode json) {
        try {
            parse(json);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertEquals("Error parsing 'node' field for node. Expected a text value or array of text values.", e.getMessage());
        }
    }
}
| 4,147 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseRangeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Verifies that {@code ParseRange} parses a two-element numeric JSON array
 * into a {@code Range}, normalizing the bounds so that low <= high
 * irrespective of the order supplied.
 */
public class ParseRangeTest {

    @Test
    public void shouldParseRangeFromJson() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode bounds = JsonNodeFactory.instance.arrayNode();
        bounds.add(1);
        bounds.add(10L);
        json.set("range", bounds);

        Range range = new ParseRange(json, "range", new ParsingContext("desc")).parseRange();

        assertEquals(1L, range.low());
        assertEquals(10L, range.high());
    }

    @Test
    public void shouldParseRangeFromJsonWithHighLowSwitched() {
        // Bounds supplied high-first: the parser should swap them.
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode bounds = JsonNodeFactory.instance.arrayNode();
        bounds.add(10L);
        bounds.add(1);
        json.set("range", bounds);

        Range range = new ParseRange(json, "range", new ParsingContext("desc")).parseRange();

        assertEquals(1L, range.low());
        assertEquals(10L, range.high());
    }
}
| 4,148 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseEdgeTypeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseEdgeType;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Verifies that {@code ParseEdgeType} parses the 'edge' field of a training
 * config element — a three-element array of [fromLabel, edgeLabel, toLabel]
 * — including multi-label endpoints (arrays or semicolon-separated strings)
 * and escaped semicolons within a single label.
 */
public class ParseEdgeTypeTest {
    @Test
    public void shouldParseEdge() throws JsonProcessingException {
        String json = "{ \"edge\": [\"person\", \"wrote\", \"post\"]}";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        ParseEdgeType parseEdgeType = new ParseEdgeType(jsonNode, new ParsingContext("DESC"));
        Label label = parseEdgeType.parseEdgeType();
        assertEquals("person", label.fromLabelsAsString());
        assertEquals("wrote", label.labelsAsString());
        assertEquals("post", label.toLabelsAsString());
    }
    @Test
    public void shouldParseEdgeWithSemicolons() throws JsonProcessingException {
        // Semicolon-separated endpoint labels are split, sorted, and rejoined.
        String json = "{ \"edge\": [\"person;admin\", \"wrote\", \"post;content\"]}";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        ParseEdgeType parseEdgeType = new ParseEdgeType(jsonNode, new ParsingContext("DESC"));
        Label label = parseEdgeType.parseEdgeType();
        assertEquals("admin;person", label.fromLabelsAsString());
        assertEquals("wrote", label.labelsAsString());
        assertEquals("content;post", label.toLabelsAsString());
    }
    @Test
    public void shouldEscapeSemicolons() throws JsonProcessingException {
        // A backslash-escaped semicolon ("admin\;aa" after JSON decoding) stays
        // inside a single label rather than acting as a separator; the edge
        // label's own semicolon ("wrote;x") is escaped on output.
        String json = "{ \"edge\": [\"person;admin\\\\;aa\", \"wrote;x\", \"post;content\"]}";
        JsonNode jsonNode = new ObjectMapper().readTree(json);
        ParseEdgeType parseEdgeType = new ParseEdgeType(jsonNode, new ParsingContext("DESC"));
        Label label = parseEdgeType.parseEdgeType();
        assertEquals("admin\\;aa;person", label.fromLabelsAsString());
        assertEquals("wrote\\;x", label.labelsAsString());
        assertEquals("content;post", label.toLabelsAsString());
    }
    @Test
    public void canParseSimpleEdge() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
        arrayNode.add("Person");
        arrayNode.add("wrote");
        arrayNode.add("Post");
        json.set("edge", arrayNode);
        Label label = new ParseEdgeType(json, new ParsingContext("edge")).parseEdgeType();
        // Fully-qualified form is "(from)-edge-(to)".
        assertEquals("(Person)-wrote-(Post)", label.fullyQualifiedLabel());
    }
    @Test
    public void canParseComplexEdge() {
        // Both endpoints given as arrays of labels; each is sorted alphabetically.
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
        arrayNode.add(arrayFrom("Person", "Admin"));
        arrayNode.add("wrote");
        arrayNode.add(arrayFrom("Post", "Content"));
        json.set("edge", arrayNode);
        Label label = new ParseEdgeType(json, new ParsingContext("edge")).parseEdgeType();
        assertEquals("(Admin;Person)-wrote-(Content;Post)", label.fullyQualifiedLabel());
    }
    @Test
    public void canParseEdgeWithOneComplexLabel() {
        // Simple (string) and complex (array) endpoint forms can be mixed.
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
        arrayNode.add("Person"); // Simple
        arrayNode.add("wrote");
        arrayNode.add(arrayFrom("Post", "Content")); // Complex
        json.set("edge", arrayNode);
        Label label = new ParseEdgeType(json, new ParsingContext("edge")).parseEdgeType();
        assertEquals("(Person)-wrote-(Content;Post)", label.fullyQualifiedLabel());
    }
    // Builds a JSON array node from the supplied string values.
    private ArrayNode arrayFrom(String... values){
        ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
        for (String value : values) {
            arrayNode.add(value);
        }
        return arrayNode;
    }
}
| 4,149 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/export/ParamConverterTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.export;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Verifies {@code ParamConverter}: camelCase parameter names become
 * dash-delimited CLI flags, plural names are singularized, and a JSON
 * parameter object is converted into a full CLI argument string.
 */
public class ParamConverterTest {

    @Test
    public void shouldConvertCamelCaseToDashDelimitedLowerCase(){
        assertEquals("my-long-args", ParamConverter.toCliArg("myLongArgs"));
    }

    @Test
    public void shouldSingularizeValue(){
        // Handles regular plurals, already-singular words, and '-ies' plurals.
        assertEquals("endpoint", ParamConverter.singularize("endpoints"));
        assertEquals("name", ParamConverter.singularize("name"));
        assertEquals("query", ParamConverter.singularize("queries"));
    }

    @Test
    public void shouldConvertParams() throws JsonProcessingException {
        String paramsJson = "{\n" +
                " \"endpoints\": [\"endpoint1\", \"endpoint2\"],\n" +
                " \"profile\": \"neptune_ml\",\n" +
                " \"useIamAuth\": true,\n" +
                " \"cloneCluster\": true,\n" +
                " \"cloneClusterReplicaCount\": 2\n" +
                " }";
        JsonNode params = new ObjectMapper().readTree(paramsJson);

        // Arrays become repeated singularized options; booleans become flags.
        Args cliArgs = ParamConverter.fromJson("export-pg", params);

        assertEquals("export-pg --endpoint 'endpoint1' --endpoint 'endpoint2' --profile 'neptune_ml' --use-iam-auth --clone-cluster --clone-cluster-replica-count 2", cliArgs.toString());
    }
}
| 4,150 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/export/EndpointValidatorTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.export;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
/**
 * Verifies that {@code EndpointValidator.validate} normalizes Neptune
 * endpoint strings by stripping any protocol prefix (ws/wss/http/https)
 * and any trailing port.
 */
public class EndpointValidatorTest {

    @Test
    public void shouldRemoveProtocol() {
        assertAllValidateToBareEndpoint(
                "my-endpoint",
                "ws://my-endpoint",
                "wss://my-endpoint",
                "http://my-endpoint",
                "https://my-endpoint");
    }

    @Test
    public void shouldRemovePort() {
        assertAllValidateToBareEndpoint("my-endpoint", "my-endpoint:8182");
    }

    @Test
    public void shouldRemoveProtocolAndPort() {
        assertAllValidateToBareEndpoint("my-endpoint", "https://my-endpoint:8182");
    }

    // Asserts that every supplied endpoint string validates to "my-endpoint".
    private void assertAllValidateToBareEndpoint(String... endpoints) {
        for (String endpoint : endpoints) {
            assertEquals("my-endpoint", EndpointValidator.validate(endpoint));
        }
    }
}
| 4,151 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/export/ArgsTest.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.export;
import org.junit.Test;
import java.util.Collection;
import java.util.Iterator;
import static org.junit.Assert.*;
/**
 * Tests for {@code Args}: a mutable wrapper around a CLI argument string
 * that supports querying, adding, removing and replacing options and flags.
 */
public class ArgsTest {
    @Test
    public void shouldRemoveOptions() throws Exception {
        // Removing an option also removes its value; unknown options are ignored.
        Args args = new Args("-x \"extra\" -e endpoint -c config -q query -f file");
        args.removeOptions("-x", "-c", "-f", "-notpresent");
        assertArrayEquals(new String[]{"-e", "endpoint", "-q", "query"}, args.values());
    }
    @Test
    public void shouldRemoveMultipleOccurrencesOfOption() throws Exception {
        // Every occurrence of a repeated option (and its value) is removed.
        Args args = new Args("-e endpoint -l label1 -l label2 -l label3");
        args.removeOptions("-l");
        assertArrayEquals(new String[]{"-e", "endpoint"}, args.values());
    }
    @Test
    public void shouldRemoveFlags() throws Exception {
        // Flags have no value, so only the flag token itself is removed.
        Args args = new Args("-e endpoint -flag1 -c config -flag2 -q query");
        args.removeFlags("-flag1", "-flag2");
        assertArrayEquals(new String[]{"-e", "endpoint", "-c", "config", "-q", "query"}, args.values());
    }
    @Test
    public void shouldAddOption() throws Exception {
        // Added options are appended; quoted values are preserved verbatim.
        Args args = new Args("-e endpoint -c config");
        args.addOption("-l", "label1");
        args.addOption("-q", "result=\"g.V('id').toList()\"");
        assertArrayEquals(new String[]{"-e", "endpoint", "-c", "config", "-l", "label1", "-q", "result=\"g.V('id').toList()\""}, args.values());
    }
    @Test
    public void shouldFormatAsString() throws Exception {
        Args args = new Args("-e endpoint -c config");
        assertEquals("-e endpoint -c config", args.toString());
    }
    @Test
    public void shouldIndicateWhetherArgsContainArg(){
        Args args = new Args("-e endpoint -c config");
        assertTrue(args.contains("-c"));
        assertFalse(args.contains("-x"));
    }
    @Test
    public void shouldIndicateWhetherArgsContainArgWithValue(){
        // contains(arg, value) matches only when the value immediately follows the arg.
        Args args = new Args("-e endpoint --profile xyz --profile neptune_ml -c config -b");
        assertTrue(args.contains("--profile", "neptune_ml"));
        assertFalse(args.contains("-b", "xyz"));
    }
    @Test
    public void shouldIndicateWhetherArgsContainArgWithQuotedValue(){
        // A quoted value ("neptune_ml") matches an unquoted query value.
        Args args = new Args("-e endpoint --profile xyz --profile \"neptune_ml\" -c config -b");
        assertTrue(args.contains("--profile", "neptune_ml"));
        assertFalse(args.contains("-b", "xyz"));
    }
    @Test
    public void shouldReplaceArg(){
        Args args = new Args("export-pg -e endpoint --profile xyz");
        args.replace("export-pg", "export-pg-from-config");
        assertEquals("export-pg-from-config -e endpoint --profile xyz", args.toString());
    }
    @Test
    public void shouldIndicateWhetherAnyOfTheSuppliedArgsIsPresent(){
        Args args = new Args("export-pg -e endpoint --profile xyz");
        assertTrue(args.containsAny("x", "y", "-e", "z"));
        assertFalse(args.containsAny("x", "y", "z"));
    }
    @Test
    public void shouldGetFirstOptionValue(){
        // With repeated options, the value of the first occurrence wins.
        Args args = new Args("export-pg -e endpoint --profile xyz --profile abc -e endpoint --use-ssl --profile 123");
        assertEquals("xyz", args.getFirstOptionValue("--profile"));
    }
    @Test
    public void shouldGetAllOptionValues(){
        // Values for a repeated option are returned in their original order.
        Args args = new Args("export-pg -e endpoint --profile xyz --profile abc -e endpoint --use-ssl --profile 123");
        Collection<String> optionValues = args.getOptionValues("--profile");
        assertEquals(3, optionValues.size());
        Iterator<String> iterator = optionValues.iterator();
        assertEquals("xyz", iterator.next());
        assertEquals("abc", iterator.next());
        assertEquals("123", iterator.next());
    }
}
| 4,152 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/export/NeptuneExportLambdaTest.java
|
package com.amazonaws.services.neptune.export;
import com.amazonaws.SDKGlobalConfiguration;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.LambdaLogger;
import com.amazonaws.util.StringInputStream;
import com.fasterxml.jackson.core.JsonParseException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.Assertion;
import org.junit.contrib.java.lang.system.ExpectedSystemExit;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NeptuneExportLambdaTest {

    private Context context;
    private LambdaLogger logger;
    private ByteArrayOutputStream outputStreamCaptor;
    // Original stderr stream, saved in setup() and restored in tearDown().
    // Without the restore, the redirect leaked into every test class run afterwards.
    private PrintStream originalErr;

    @Rule
    public final ExpectedSystemExit exit = ExpectedSystemExit.none();

    @Before
    public void setup() {
        context = mock(Context.class);
        logger = mock(LambdaLogger.class);
        when(context.getLogger()).thenReturn(logger);
        // Capture stderr so ErrorChecker can inspect error output after a System.exit.
        outputStreamCaptor = new ByteArrayOutputStream();
        originalErr = System.err;
        System.setErr(new PrintStream(outputStreamCaptor));
    }

    @After
    public void tearDown() {
        // Undo the stderr redirect so it does not affect other tests or the runner.
        System.setErr(originalErr);
    }

    @Test
    public void shouldRejectIllegalArguments() throws IOException {
        exit.expectSystemExitWithStatus(-1);
        exit.checkAssertionAfterwards(new ErrorChecker("Found unexpected parameters:"));
        System.setProperty(SDKGlobalConfiguration.AWS_REGION_SYSTEM_PROPERTY, "us-west-2");
        NeptuneExportLambda lambda = new NeptuneExportLambda();
        String input = "{" +
                "\"params\": {\"endpoint\" : \"fakeEndpoint\"," +
                "\"illegalArgument\": \"test\"}}";
        lambda.handleRequest(new StringInputStream(input), mock(OutputStream.class), context);
    }

    @Test
    public void shouldRejectMissingRequiredArguments() throws IOException {
        exit.expectSystemExitWithStatus(-1);
        exit.checkAssertionAfterwards(new ErrorChecker("One/more of the following options must be specified: -e, --endpoint, --cluster-id, --cluster, --clusterid"));
        System.setProperty(SDKGlobalConfiguration.AWS_REGION_SYSTEM_PROPERTY, "us-west-2");
        NeptuneExportLambda lambda = new NeptuneExportLambda();
        String input = "{\"command\": \"export-pg\", \"params\": {}}";
        lambda.handleRequest(new StringInputStream(input), mock(OutputStream.class), context);
    }

    @Test
    public void shouldRejectMalformedJSON() throws IOException {
        // Malformed JSON should surface as a JsonParseException rather than a System.exit.
        NeptuneExportLambda lambda = new NeptuneExportLambda();
        String input = "{[}";
        assertThrows(JsonParseException.class,
                () -> lambda.handleRequest(new StringInputStream(input), mock(OutputStream.class), context));
    }

    /**
     * Asserts, after the expected system exit fires, that the captured stderr
     * output contains the expected error message.
     */
    private class ErrorChecker implements Assertion {

        private final String expectedMessage;

        ErrorChecker(String expectedMessage) {
            this.expectedMessage = expectedMessage;
        }

        @Override
        public void checkAssertion() throws Exception {
            String capturedErrors = new String(outputStreamCaptor.toByteArray());
            assertTrue(capturedErrors.contains(expectedMessage));
        }
    }
}
| 4,153 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/rdf
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/rdf/io/NeptuneExportSparqlRepositoryTest.java
|
package com.amazonaws.services.neptune.rdf.io;
import com.amazonaws.neptune.auth.NeptuneSigV4SignerException;
import com.amazonaws.services.neptune.cluster.ConnectionConfig;
import org.apache.http.Header;
import org.apache.http.conn.EofSensorInputStream;
import org.apache.http.impl.io.ChunkedInputStream;
import org.apache.http.message.BasicHeader;
import org.apache.http.protocol.HttpContext;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NeptuneExportSparqlRepositoryTest {

    // Method names were renamed from UpperCamelCase ("ShouldGet...") to the
    // conventional lowerCamelCase; JUnit discovers tests via @Test, not by name.

    /** Builds a repository backed by a mocked connection config. */
    private NeptuneExportSparqlRepository createRepository() throws NeptuneSigV4SignerException {
        ConnectionConfig mockedConfig = mock(ConnectionConfig.class);
        return new NeptuneExportSparqlRepository("test", null, null, mockedConfig);
    }

    /** Builds a mocked chunked stream whose trailers contain a single "name: value" footer. */
    private ChunkedInputStream createStreamWithFooter() {
        ChunkedInputStream mockedStream = mock(ChunkedInputStream.class);
        when(mockedStream.getFooters()).thenReturn(new Header[]{new BasicHeader("name", "value")});
        return mockedStream;
    }

    @Test
    public void shouldGetEmptyErrorMessageFromNewRepository() throws NeptuneSigV4SignerException {
        // A repository that has not yet handled a response has no trailers to report.
        assertEquals("", createRepository().getErrorMessageFromTrailers());
    }

    @Test
    public void shouldGetTrailerErrorMessagesFromChunkedStream() throws NeptuneSigV4SignerException {
        NeptuneExportSparqlRepository repo = createRepository();
        HttpContext mockedContext = mock(HttpContext.class);
        when(mockedContext.getAttribute("raw-response-inputstream")).thenReturn(createStreamWithFooter());
        repo.setLastContext(mockedContext);
        assertEquals("name: value\n", repo.getErrorMessageFromTrailers());
    }

    @Test
    public void shouldGetTrailerErrorMessagesFromEofSensorInputStream() throws NeptuneSigV4SignerException {
        // Trailers should also be found when the chunked stream is wrapped in an EofSensorInputStream.
        NeptuneExportSparqlRepository repo = createRepository();
        EofSensorInputStream eofSensorInputStream = new EofSensorInputStream(createStreamWithFooter(), null);
        HttpContext mockedContext = mock(HttpContext.class);
        when(mockedContext.getAttribute("raw-response-inputstream")).thenReturn(eofSensorInputStream);
        repo.setLastContext(mockedContext);
        assertEquals("name: value\n", repo.getErrorMessageFromTrailers());
    }
}
| 4,154 |
0 |
Create_ds/neptune-export/src/main/java/org/apache/tinkerpop/gremlin
|
Create_ds/neptune-export/src/main/java/org/apache/tinkerpop/gremlin/driver/LBAwareSigV4WebSocketChannelizer.java
|
/*
* Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.driver;
import com.amazon.neptune.gremlin.driver.sigv4.AwsSigV4ClientHandshaker;
import com.amazon.neptune.gremlin.driver.sigv4.ChainedSigV4PropertiesProvider;
import com.amazonaws.services.neptune.auth.HandshakeRequestConfig;
import com.amazonaws.services.neptune.auth.LBAwareAwsSigV4ClientHandshaker;
import io.netty.channel.Channel;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.http.EmptyHttpHeaders;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.websocketx.CloseWebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketClientHandshaker;
import io.netty.handler.codec.http.websocketx.WebSocketVersion;
import org.apache.tinkerpop.gremlin.driver.exception.ConnectionException;
import org.apache.tinkerpop.gremlin.driver.handler.WebSocketClientHandler;
import org.apache.tinkerpop.gremlin.driver.handler.WebSocketGremlinRequestEncoder;
import org.apache.tinkerpop.gremlin.driver.handler.WebSocketGremlinResponseDecoder;
import java.util.concurrent.TimeUnit;
public class LBAwareSigV4WebSocketChannelizer extends Channelizer.AbstractChannelizer {
/**
* Constant to denote the websocket protocol.
*/
private static final String WEB_SOCKET = "ws";
/**
* Constant to denote the websocket secure protocol.
*/
private static final String WEB_SOCKET_SECURE = "wss";
/**
* Name of the HttpCodec handler.
*/
private static final String HTTP_CODEC = "http-codec";
/**
* Name of the HttpAggregator handler.
*/
private static final String AGGREGATOR = "aggregator";
/**
* Name of the WebSocket handler.
*/
protected static final String WEB_SOCKET_HANDLER = "ws-handler";
/**
* Name of the GremlinEncoder handler.
*/
private static final String GREMLIN_ENCODER = "gremlin-encoder";
/**
* Name of the GremlinDecoder handler.
*/
private static final String GRELIN_DECODER = "gremlin-decoder";
/**
* Handshake timeout.
*/
private static final int HANDSHAKE_TIMEOUT_MILLIS = 15000;
/**
* The handler to process websocket messages from the server.
*/
private WebSocketClientHandler handler;
/**
* Encoder to encode websocket requests.
*/
private WebSocketGremlinRequestEncoder webSocketGremlinRequestEncoder;
/**
* Decoder to decode websocket requests.
*/
private WebSocketGremlinResponseDecoder webSocketGremlinResponseDecoder;
/**
* Initializes the channelizer.
* @param connection the {@link Connection} object.
*/
@Override
public void init(final Connection connection) {
super.init(connection);
webSocketGremlinRequestEncoder = new WebSocketGremlinRequestEncoder(true, cluster.getSerializer());
webSocketGremlinResponseDecoder = new WebSocketGremlinResponseDecoder(cluster.getSerializer());
}
/**
* Sends a {@code CloseWebSocketFrame} to the server for the specified channel.
*/
@Override
public void close(final Channel channel) {
if (channel.isOpen()) {
channel.writeAndFlush(new CloseWebSocketFrame());
}
}
@Override
public boolean supportsSsl() {
final String scheme = connection.getUri().getScheme();
return "wss".equalsIgnoreCase(scheme);
}
@Override
public void configure(final ChannelPipeline pipeline) {
final String scheme = connection.getUri().getScheme();
if (!WEB_SOCKET.equalsIgnoreCase(scheme) && !WEB_SOCKET_SECURE.equalsIgnoreCase(scheme)) {
throw new IllegalStateException(String.format("Unsupported scheme (only %s: or %s: supported): %s",
WEB_SOCKET, WEB_SOCKET_SECURE, scheme));
}
if (!supportsSsl() && WEB_SOCKET_SECURE.equalsIgnoreCase(scheme)) {
throw new IllegalStateException(String.format("To use %s scheme ensure that enableSsl is set to true in "
+ "configuration",
WEB_SOCKET_SECURE));
}
final int maxContentLength = cluster.connectionPoolSettings().maxContentLength;
handler = createHandler();
pipeline.addLast(HTTP_CODEC, new HttpClientCodec());
pipeline.addLast(AGGREGATOR, new HttpObjectAggregator(maxContentLength));
pipeline.addLast(WEB_SOCKET_HANDLER, handler);
pipeline.addLast(GREMLIN_ENCODER, webSocketGremlinRequestEncoder);
pipeline.addLast(GRELIN_DECODER, webSocketGremlinResponseDecoder);
}
@Override
public void connected() {
try {
// block for a few seconds - if the handshake takes longer 15 seconds than there's gotta be issues with that
// server. more than likely, SSL is enabled on the server, but the client forgot to enable it or
// perhaps the server is not configured for websockets.
handler.handshakeFuture().get(HANDSHAKE_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
} catch (Exception ex) {
throw new RuntimeException(new ConnectionException(connection.getUri(),
"Could not complete websocket handshake - ensure that client protocol matches server", ex));
}
}
/**
* Creates an instance of {@link WebSocketClientHandler} with {@link AwsSigV4ClientHandshaker} as the handshaker
* for SigV4 auth.
* @return the instance of clientHandler.
*/
private WebSocketClientHandler createHandler() {
HandshakeRequestConfig handshakeRequestConfig =
HandshakeRequestConfig.parse(cluster.authProperties().get(AuthProperties.Property.JAAS_ENTRY));
WebSocketClientHandshaker handshaker = new LBAwareAwsSigV4ClientHandshaker(
connection.getUri(),
WebSocketVersion.V13,
null,
false,
EmptyHttpHeaders.INSTANCE,
cluster.getMaxContentLength(),
new ChainedSigV4PropertiesProvider(),
handshakeRequestConfig);
return new WebSocketClientHandler(handshaker, 10000, supportsSsl());
}
}
| 4,155 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/CreatePropertyGraphExportConfig.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.*;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.io.DirectoryStructure;
import com.amazonaws.services.neptune.io.Target;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient;
import com.amazonaws.services.neptune.propertygraph.io.ExportPropertyGraphJob;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphTargetConfig;
import com.amazonaws.services.neptune.propertygraph.schema.CreateGraphSchemaCommand;
import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.Timer;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.help.Examples;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import javax.inject.Inject;
import java.util.Collection;
@Examples(examples = {
"bin/neptune-export.sh create-pg-config -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output",
"bin/neptune-export.sh create-pg-config -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output --sample --sample-size 100",
"bin/neptune-export.sh create-pg-config -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -nl User -el FOLLOWS"
}, descriptions = {
"Create schema config file for all node and edge labels and save it to /home/ec2-user/output",
"Create schema config file for all node and edge labels, sampling 100 nodes and edges for each label",
"Create config file containing schema for User nodes and FOLLOWS edges"
})
@Command(name = "create-pg-config", description = "Create a property graph schema config file.")
// Airline command that generates a property graph schema config file, either by a
// full scan (a complete export whose data is discarded to /dev/null) or by sampling.
public class CreatePropertyGraphExportConfig extends NeptuneExportCommand implements Runnable {
// Airline-injected modules that contribute this command's options and behaviors.
@Inject
private CloneClusterModule cloneStrategy = new CloneClusterModule();
@Inject
private CommonConnectionModule connection = new CommonConnectionModule(awsCli);
// Schema creation writes no graph data, so the output target defaults to devnull.
@Inject
private PropertyGraphTargetModule target = new PropertyGraphTargetModule(Target.devnull);
@Inject
private PropertyGraphScopeModule scope = new PropertyGraphScopeModule();
@Inject
private PropertyGraphConcurrencyModule concurrency = new PropertyGraphConcurrencyModule();
@Inject
private PropertyGraphSerializationModule serialization = new PropertyGraphSerializationModule();
@Inject
private PropertyGraphSchemaInferencingModule sampling = new PropertyGraphSchemaInferencingModule();
@Inject
private GremlinFiltersModule gremlinFilters = new GremlinFiltersModule();
// Runs the schema-creation workflow. Any exception is routed to handleException()
// (inherited from NeptuneExportCommand) rather than propagated.
@Override
public void run() {
try {
Timer.timedActivity("creating property graph config", (CheckedActivity.Runnable) () -> {
// Cluster is AutoCloseable; cloneCluster() presumably returns either the original
// or a clone depending on the clone-cluster options - confirm in CloneClusterModule.
try (Cluster cluster = cloneStrategy.cloneCluster(
connection.clusterMetadata(),
connection.config(),
concurrency.config(sampling.isFullScan()),
featureToggles())) {
if (sampling.isFullScan()) {
// Full-scan path: run a complete export job (data discarded via the devnull
// target) so the schema is derived from every node and edge.
Directories directories = target.createDirectories(DirectoryStructure.Config);
JsonResource<GraphSchema, Boolean> configFileResource = directories.configFileResource();
JsonResource<ExportStats, GraphSchema> statsFileResource = directories.statsFileResource();
GraphSchema graphSchema = new GraphSchema();
ExportStats stats = new ExportStats();
PropertyGraphTargetConfig targetConfig = target.config(directories, new PrinterOptionsModule().config());
Collection<ExportSpecification> exportSpecifications = scope.exportSpecifications(
graphSchema,
gremlinFilters.filters(),
stats,
featureToggles());
try (NeptuneGremlinClient client = NeptuneGremlinClient.create(cluster, serialization.config());
GraphTraversalSource g = client.newTraversalSource()) {
ExportPropertyGraphJob exportJob = new ExportPropertyGraphJob(
exportSpecifications,
graphSchema,
g,
new PropertyGraphRangeModule().config(),
gremlinFilters.filters(),
cluster.concurrencyConfig(),
targetConfig, featureToggles(),
getMaxFileDescriptorCount()
);
// The job returns the (possibly updated) schema, which is then persisted
// alongside the export stats.
graphSchema = exportJob.execute();
configFileResource.save(graphSchema, false);
statsFileResource.save(stats, graphSchema);
}
directories.writeRootDirectoryPathAsMessage(target.description(), target);
configFileResource.writeResourcePathAsMessage(target);
System.err.println();
System.err.println(stats.formatStats(graphSchema));
// Full-scan path reports the root directory as the command's return value.
directories.writeRootDirectoryPathAsReturnValue(target);
onExportComplete(directories, stats, cluster, graphSchema);
} else {
// Sampling path: infer the schema from a sample of nodes/edges per label
// instead of exporting everything.
ExportStats stats = new ExportStats();
Directories directories = target.createDirectories(DirectoryStructure.Config);
JsonResource<GraphSchema, Boolean> configFileResource = directories.configFileResource();
JsonResource<ExportStats, GraphSchema> statsFileResource = directories.statsFileResource();
Collection<ExportSpecification> exportSpecifications = scope.exportSpecifications(
stats,
gremlinFilters.filters(),
featureToggles());
try (NeptuneGremlinClient client = NeptuneGremlinClient.create(cluster, serialization.config());
GraphTraversalSource g = client.newTraversalSource()) {
CreateGraphSchemaCommand createGraphSchemaCommand = sampling.createSchemaCommand(exportSpecifications, g);
GraphSchema graphSchema = createGraphSchemaCommand.execute();
configFileResource.save(graphSchema, false);
statsFileResource.save(stats, graphSchema);
configFileResource.writeResourcePathAsMessage(target);
}
// Sampling path reports the config file path (not the root directory) as the return value.
directories.writeConfigFilePathAsReturnValue(target);
onExportComplete(directories, stats, cluster);
}
}
});
} catch (Exception e) {
handleException(e);
}
}
}
| 4,156 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/NeptuneExportBaseCommand.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.AllowedValues;
import com.github.rvesse.airline.annotations.restrictions.Once;
// Base class for neptune-export CLI commands; declares options shared by all commands.
public abstract class NeptuneExportBaseCommand {
// Populated by the Airline parser from --log-level. The field is not read in this
// class; presumably it is consumed elsewhere (e.g. reflectively or by subclasses) -
// confirm before removing.
@Option(name = {"--log-level"}, description = "Log level (optional, default 'error').", title = "log level")
@Once
@AllowedValues(allowedValues = {"trace", "debug", "info", "warn", "error"})
private String logLevel = "error";
}
| 4,157 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/GetClusterInfo.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.AwsCliModule;
import com.amazonaws.services.neptune.cluster.NeptuneClusterMetadata;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import org.apache.commons.lang.StringUtils;
import javax.inject.Inject;
@Command(name = "nei", description = "neptune-export cluster info", hidden = true)
public class GetClusterInfo implements Runnable {

    @Inject
    private AwsCliModule awsCli = new AwsCliModule();

    @Option(name = {"-e", "--endpoint"}, description = "Neptune endpoint.", title = "endpoint")
    @Once
    private String endpoint;

    @Option(name = {"--cluster-id"}, description = "Neptune cluster ID.", title = "clusterId")
    @Once
    private String clusterId;

    /**
     * Resolves the Neptune cluster (preferring an explicit cluster ID over an
     * endpoint) and prints its details. Errors are reported to stderr rather
     * than propagated.
     */
    @Override
    public void run() {
        try {
            if (StringUtils.isEmpty(endpoint) && StringUtils.isEmpty(clusterId)) {
                throw new IllegalArgumentException("You must supply an endpoint or cluster ID");
            }
            NeptuneClusterMetadata metadata;
            if (StringUtils.isNotEmpty(clusterId)) {
                metadata = NeptuneClusterMetadata.createFromClusterId(clusterId, awsCli);
            } else {
                // No explicit ID supplied: derive the cluster ID from the endpoint first.
                String derivedClusterId = NeptuneClusterMetadata.clusterIdFromEndpoint(endpoint);
                metadata = NeptuneClusterMetadata.createFromClusterId(derivedClusterId, awsCli);
            }
            printClusterDetails(metadata);
        } catch (Exception e) {
            System.err.println("An error occurred while creating Neptune cluster info:");
            e.printStackTrace();
        }
    }

    /** Writes the cluster details to stderr, preceded by a blank line. */
    public static void printClusterDetails(NeptuneClusterMetadata metadata) {
        System.err.println();
        metadata.printDetails();
    }
}
| 4,158 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/RunNeptuneExportSvc.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.lambda.runtime.ClientContext;
import com.amazonaws.services.lambda.runtime.CognitoIdentity;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.LambdaLogger;
import com.amazonaws.services.neptune.export.NeptuneExportLambda;
import com.amazonaws.services.neptune.util.NotImplementedException;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
@Command(name = "nesvc", description = "neptune-export service", hidden = true)
// Hidden command that runs the export service in-process: it feeds the supplied JSON
// request to NeptuneExportLambda using a stub Lambda Context.
public class RunNeptuneExportSvc extends NeptuneExportBaseCommand implements Runnable {
/**
 * Same as the default value given in the CFN template at https://docs.aws.amazon.com/neptune/latest/userguide/export-service.html
 */
public static final int DEFAULT_MAX_FILE_DESCRIPTOR_COUNT = 10000;
// JSON request document describing the export (same shape as the Lambda input).
@Option(name = {"--json"}, description = "JSON")
@Once
private String json;
@Option(name = {"--root-path"}, description = "Root directory path", hidden = true)
@Once
private String rootPath = new File("exports").getAbsolutePath();
@Option(name = {"--clean"}, description = "Clean output path before beginning an export.", hidden = true)
@Once
private boolean cleanRootPath = false;
@Option(name = {"--max-file-descriptor-count"}, description = "Maximum number of simultaneously open files.", hidden = true)
@Once
private int maxFileDescriptorCount = DEFAULT_MAX_FILE_DESCRIPTOR_COUNT;
// Delegates to NeptuneExportLambda with a stub Context: only getLogger() is
// implemented; every other Context method throws NotImplementedException, so the
// lambda presumably never calls them on this path - confirm before relying on
// additional Context members. Exits the JVM with 0 on success, -1 on failure.
@Override
public void run() {
InputStream input = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8));
try {
new NeptuneExportLambda(rootPath, cleanRootPath, maxFileDescriptorCount).handleRequest(input, System.out, new Context() {
@Override
public String getAwsRequestId() {
throw new NotImplementedException();
}
@Override
public String getLogGroupName() {
throw new NotImplementedException();
}
@Override
public String getLogStreamName() {
throw new NotImplementedException();
}
@Override
public String getFunctionName() {
throw new NotImplementedException();
}
@Override
public String getFunctionVersion() {
throw new NotImplementedException();
}
@Override
public String getInvokedFunctionArn() {
throw new NotImplementedException();
}
@Override
public CognitoIdentity getIdentity() {
throw new NotImplementedException();
}
@Override
public ClientContext getClientContext() {
throw new NotImplementedException();
}
@Override
public int getRemainingTimeInMillis() {
throw new NotImplementedException();
}
@Override
public int getMemoryLimitInMB() {
throw new NotImplementedException();
}
// The only Context member the in-process path supports: log(String) is routed to stdout.
@Override
public LambdaLogger getLogger() {
return new LambdaLogger() {
@Override
public void log(String s) {
System.out.println(s);
}
@Override
public void log(byte[] bytes) {
throw new NotImplementedException();
}
};
}
});
} catch (Exception e) {
e.printStackTrace();
System.err.println("An error occurred while exporting from Neptune: " + e.getMessage());
// Non-zero exit status signals failure to the calling process.
System.exit(-1);
}
System.exit(0);
}
}
| 4,159 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/NeptuneExportCli.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportRunner;
import com.github.rvesse.airline.annotations.Alias;
import com.github.rvesse.airline.annotations.Cli;
import com.github.rvesse.airline.annotations.Parser;
import com.github.rvesse.airline.help.Help;
@Cli(name = "neptune-export.sh",
description = "Export Neptune to CSV or JSON",
defaultCommand = Help.class,
commands = {
ExportPropertyGraph.class,
CreatePropertyGraphExportConfig.class,
ExportPropertyGraphFromConfig.class,
ExportPropertyGraphFromGremlinQueries.class,
ExportRdfGraph.class,
RunNeptuneExportSvc.class,
GetClusterInfo.class,
AddClone.class,
RemoveClone.class,
Help.class},
parserConfiguration = @Parser(aliases = {
@Alias(name = "create-config",
arguments = {"create-pg-config"}),
@Alias(name = "export",
arguments = {"export-pg"}),
@Alias(name = "export-from-config",
arguments = {"export-pg-from-config"}),
@Alias(name = "export-from-queries",
arguments = {"export-pg-from-queries"})
}))
public class NeptuneExportCli {
    /**
     * CLI entry point: hands the raw arguments to {@link NeptuneExportRunner},
     * which parses them and executes the selected command.
     */
    public static void main(String[] args) {
        NeptuneExportRunner runner = new NeptuneExportRunner(args);
        runner.run();
    }
}
| 4,160 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/NeptuneExportEventHandlerHost.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportEventHandler;
/**
 * Implemented by components that accept a {@link NeptuneExportEventHandler},
 * allowing callers to register a handler for export events.
 */
public interface NeptuneExportEventHandlerHost {
/**
 * Registers the event handler.
 *
 * @param eventHandler the handler to register
 */
void setEventHandler(NeptuneExportEventHandler eventHandler);
}
| 4,161 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/ExportPropertyGraphFromGremlinQueries.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.*;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.io.DirectoryStructure;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.GremlinFilters;
import com.amazonaws.services.neptune.propertygraph.NamedQueries;
import com.amazonaws.services.neptune.propertygraph.NamedQueriesCollection;
import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient;
import com.amazonaws.services.neptune.propertygraph.airline.NameQueriesTypeConverter;
import com.amazonaws.services.neptune.propertygraph.io.*;
import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.Timer;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.help.Examples;
import com.github.rvesse.airline.annotations.restrictions.Once;
import javax.inject.Inject;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@Examples(examples = {
"bin/neptune-export.sh export-pg-from-queries -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -q person=\"g.V().hasLabel('Person').has('birthday', lt('1985-01-01')).project('id', 'first_name', 'last_name', 'birthday').by(id).by('firstName').by('lastName').by('birthday');g.V().hasLabel('Person').has('birthday', gte('1985-01-01')).project('id', 'first_name', 'last_name', 'birthday').by(id).by('firstName').by('lastName').by('birthday')\" -q post=\"g.V().hasLabel('Post').has('imageFile').range(0, 250000).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id());g.V().hasLabel('Post').has('imageFile').range(250000, 500000).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id());g.V().hasLabel('Post').has('imageFile').range(500000, 750000).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id());g.V().hasLabel('Post').has('imageFile').range(750000, -1).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id())\" --concurrency 6",
"bin/neptune-export.sh export-pg-from-queries -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -q person=\"g.V().hasLabel('Person').has('birthday', lt('1985-01-01')).project('id', 'first_name', 'last_name', 'birthday').by(id).by('firstName').by('lastName').by('birthday');g.V().hasLabel('Person').has('birthday', gte('1985-01-01')).project('id', 'first_name', 'last_name', 'birthday').by(id).by('firstName').by('lastName').by('birthday')\" -q post=\"g.V().hasLabel('Post').has('imageFile').range(0, 250000).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id());g.V().hasLabel('Post').has('imageFile').range(250000, 500000).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id());g.V().hasLabel('Post').has('imageFile').range(500000, 750000).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id());g.V().hasLabel('Post').has('imageFile').range(750000, -1).project('id', 'image_file', 'creation_date', 'creator_id').by(id).by('imageFile').by('creationDate').by(in('CREATED').id())\" --concurrency 6 --format json"},
descriptions = {
"Parallel export of Person data in 2 shards, sharding on the 'birthday' property, and Post data in 4 shards, sharding on range, using 6 threads",
"Parallel export of Person data and Post data as JSON"
})
@Command(name = "export-pg-from-queries", description = "Export property graph to CSV or JSON from Gremlin queries.")
// Airline command: exports property-graph data by running user-supplied Gremlin queries
// against a Neptune cluster (optionally a temporary clone) and writing the results to
// CSV or JSON. Queries come either from the command line (-q) or from a JSON file (-f).
public class ExportPropertyGraphFromGremlinQueries extends NeptuneExportCommand implements Runnable {
// Modules injected by the Airline CLI framework; each contributes its own option group.
@Inject
private CloneClusterModule cloneStrategy = new CloneClusterModule();
@Inject
private CommonConnectionModule connection = new CommonConnectionModule(awsCli);
@Inject
private PropertyGraphTargetModule target = new PropertyGraphTargetModule();
@Inject
private PropertyGraphConcurrencyModule concurrency = new PropertyGraphConcurrencyModule();
@Inject
private PropertyGraphSerializationModule serialization = new PropertyGraphSerializationModule();
@Inject
private PropertyGraphScopeModule scope = new PropertyGraphScopeModule();
// Inline named queries supplied on the command line; may be empty when -f is used instead.
@Option(name = {"-q", "--queries", "--query", "--gremlin"}, description = "Gremlin queries (format: name=\"semi-colon-separated list of queries\" OR \"semi-colon-separated list of queries\").",
arity = 1, typeConverterProvider = NameQueriesTypeConverter.class)
private List<NamedQueries> queries = new ArrayList<>();
// Alternative to -q: a JSON file of queries, loadable from local path, https or s3.
@Option(name = {"-f", "--queries-file"}, description = "Path to JSON queries file (file path, or 'https' or 's3' URI).")
@Once
private URI queriesFile;
@Option(name = {"--two-pass-analysis"}, description = "Perform two-pass analysis of query results (optional, default 'false').")
@Once
private boolean twoPassAnalysis = false;
@Option(name = {"--include-type-definitions"}, description = "Include type definitions from column headers (optional, default 'false').")
@Once
private boolean includeTypeDefinitions = false;
// Optional per-query timeout; null means no explicit timeout is applied.
@Option(name = {"--timeout-millis"}, description = "Query timeout in milliseconds (optional).")
@Once
private Long timeoutMillis = null;
// NOTE(review): these two concatenated literals yield "structuredaccording to schema." —
// missing space between them; the help text is rendered without the space.
@Option(name = {"--structured-output"}, description = "Enables schema generation. When combined with \"--format csv\", CSV will be structured" +
"according to schema.")
@Once
private boolean structuredOutput = false;
// Entry point invoked by the CLI framework; any failure is routed through handleException.
@Override
public void run() {
try {
Timer.timedActivity("exporting property graph from queries", (CheckedActivity.Runnable) () -> {
// Optionally clone the source cluster so the export runs against an isolated copy;
// the try-with-resources ensures the clone is torn down when the export finishes.
try (Cluster cluster = cloneStrategy.cloneCluster(
connection.clusterMetadata(),
connection.config(),
concurrency.config(),
featureToggles())) {
Directories directories = initDirectories();
// Queries are read from the external file when supplied; otherwise the inline
// queries are persisted to a resource alongside the results (see below).
JsonResource<NamedQueriesCollection, Object> queriesResource = queriesFile != null ?
new JsonResource<>("Queries file", queriesFile, NamedQueriesCollection.class) :
directories.queriesResource();
CsvPrinterOptions csvPrinterOptions = CsvPrinterOptions.builder().setIncludeTypeDefinitions(includeTypeDefinitions).build();
JsonPrinterOptions jsonPrinterOptions = JsonPrinterOptions.builder().setStrictCardinality(true).build();
PropertyGraphTargetConfig targetConfig = target.config(directories, new PrinterOptions(csvPrinterOptions, jsonPrinterOptions));
NamedQueriesCollection namedQueries = getNamedQueriesCollection(queries, queriesFile, queriesResource);
GraphSchema graphSchema = new GraphSchema();
ExportStats exportStats = new ExportStats();
Collection<ExportSpecification> exportSpecifications = scope.exportSpecifications(
graphSchema,
GremlinFilters.EMPTY,
exportStats,
featureToggles());
// Without structured output each named query gets its own results subdirectory.
if (!structuredOutput) {
directories.createResultsSubdirectories(namedQueries.names());
}
try (NeptuneGremlinClient client = NeptuneGremlinClient.create(cluster, serialization.config());
NeptuneGremlinClient.QueryClient queryClient = client.queryClient()) {
QueryJob queryJob = new QueryJob(
namedQueries.flatten(),
queryClient,
cluster.concurrencyConfig(),
targetConfig,
twoPassAnalysis,
timeoutMillis,
exportSpecifications,
featureToggles(),
structuredOutput);
queryJob.execute();
}
// Report output locations after the export completes, then notify listeners.
directories.writeResultsDirectoryPathAsMessage(target.description(), target);
queriesResource.writeResourcePathAsMessage(target);
directories.writeRootDirectoryPathAsReturnValue(target);
onExportComplete(directories, exportStats, cluster);
}
});
} catch (Exception e) {
handleException(e);
}
}
// Chooses the output directory layout: the default layout for structured (schema-driven)
// output, or the Gremlin-queries layout (per-query subdirectories) otherwise.
private Directories initDirectories() throws IOException {
if (structuredOutput) {
return target.createDirectories();
}
return target.createDirectories(DirectoryStructure.GremlinQueries);
}
// Returns the queries to run: loads them from the queries file when one was given,
// otherwise wraps the inline queries and saves them to the resource for reproducibility.
private NamedQueriesCollection getNamedQueriesCollection(List<NamedQueries> queries,
URI queriesFile,
JsonResource<NamedQueriesCollection, Object> queriesResource) throws IOException {
if (queriesFile == null) {
NamedQueriesCollection namedQueries = new NamedQueriesCollection(queries);
queriesResource.save(namedQueries, null);
return namedQueries;
} else {
return queriesResource.get();
}
}
}
| 4,162 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/ExportPropertyGraphFromConfig.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.*;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient;
import com.amazonaws.services.neptune.propertygraph.io.ExportPropertyGraphJob;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphTargetConfig;
import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.Timer;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.help.Examples;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import javax.inject.Inject;
import java.util.Collection;
@Examples(examples = {
"bin/neptune-export.sh export-pg-from-config -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -c /home/ec2-user/config.json -d /home/ec2-user/output",
"bin/neptune-export.sh export-pg-from-config -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -c /home/ec2-user/config.json -d /home/ec2-user/output --format json"
}, descriptions = {
"Export data using the schema config in /home/ec2-user/config.json",
"Export data as JSON using the schema config in /home/ec2-user/config.json"
})
@Command(name = "export-pg-from-config", description = "Export property graph from Neptune to CSV or JSON using an existing schema config file.")
public class ExportPropertyGraphFromConfig extends NeptuneExportCommand implements Runnable {
@Inject
private CloneClusterModule cloneStrategy = new CloneClusterModule();
@Inject
private CommonConnectionModule connection = new CommonConnectionModule(awsCli);
@Inject
private PropertyGraphScopeModule scope = new PropertyGraphScopeModule();
@Inject
private PropertyGraphTargetModule target = new PropertyGraphTargetModule();
@Inject
private PropertyGraphConcurrencyModule concurrency = new PropertyGraphConcurrencyModule();
@Inject
private PropertyGraphSerializationModule serialization = new PropertyGraphSerializationModule();
@Inject
private PropertyGraphRangeModule range = new PropertyGraphRangeModule();
@Inject
private GraphSchemaProviderModule graphSchemaProvider = new GraphSchemaProviderModule(true);
@Inject
private PrinterOptionsModule printerOptions = new PrinterOptionsModule();
@Inject
private GremlinFiltersModule gremlinFilters = new GremlinFiltersModule();
@Override
public void run() {
try {
Timer.timedActivity("exporting property graph from config", (CheckedActivity.Runnable) () -> {
try (Cluster cluster = cloneStrategy.cloneCluster(
connection.clusterMetadata(),
connection.config(),
concurrency.config(),
featureToggles())) {
Directories directories = target.createDirectories();
JsonResource<GraphSchema, Boolean> configFileResource = directories.configFileResource();
JsonResource<ExportStats, GraphSchema> statsFileResource = directories.statsFileResource();
GraphSchema graphSchema = graphSchemaProvider.graphSchema();
ExportStats stats = new ExportStats();
PropertyGraphTargetConfig targetConfig = target.config(directories, printerOptions.config());
Collection<ExportSpecification> exportSpecifications = scope.exportSpecifications(
graphSchema,
gremlinFilters.filters(),
stats,
featureToggles());
try (NeptuneGremlinClient client = NeptuneGremlinClient.create(cluster, serialization.config());
GraphTraversalSource g = client.newTraversalSource()) {
ExportPropertyGraphJob exportJob = new ExportPropertyGraphJob(
exportSpecifications,
graphSchema,
g,
range.config(),
gremlinFilters.filters(),
cluster.concurrencyConfig(),
targetConfig, featureToggles(),
getMaxFileDescriptorCount()
);
graphSchema = exportJob.execute();
configFileResource.save(graphSchema, false);
statsFileResource.save(stats, graphSchema);
}
directories.writeRootDirectoryPathAsMessage(target.description(), target);
configFileResource.writeResourcePathAsMessage(target);
System.err.println();
System.err.println(stats.formatStats(graphSchema));
directories.writeRootDirectoryPathAsReturnValue(target);
onExportComplete(directories, stats, cluster, graphSchema);
}
});
} catch (Exception e) {
handleException(e);
}
}
}
| 4,163 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/AddClone.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.AwsCliModule;
import com.amazonaws.services.neptune.cluster.AddCloneTask;
import com.amazonaws.services.neptune.cluster.NeptuneClusterMetadata;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.AllowedValues;
import com.github.rvesse.airline.annotations.restrictions.Once;
import com.github.rvesse.airline.annotations.restrictions.Required;
import com.github.rvesse.airline.annotations.restrictions.ranges.IntegerRange;
import javax.inject.Inject;
import java.util.UUID;
@Command(name = "add-clone", description = "Clone an Amazon Neptune database cluster.")
// Airline command: clones an Amazon Neptune database cluster (e.g. so an export can run
// against an isolated copy rather than the live source cluster).
public class AddClone implements Runnable {
@Inject
private AwsCliModule awsCli = new AwsCliModule();
// Required: the cluster to clone.
@Option(name = {"--source-cluster-id"}, description = "Cluster ID of the source Amazon Neptune database cluster.")
@Required
@Once
private String sourceClusterId;
// Cluster ID for the clone; defaults to "neptune-export-cluster-" plus a 5-char random suffix.
@Option(name = {"--clone-cluster-id"}, description = "Cluster ID of the cloned Amazon Neptune database cluster.")
@Once
private String targetClusterId = String.format("neptune-export-cluster-%s", UUID.randomUUID().toString().substring(0, 5));
// Instance type for the clone; when unset the source cluster's instance type is reused.
// The allowed list covers both "db."-prefixed and bare instance-type spellings.
@Option(name = {"--clone-cluster-instance-type"}, description = "Instance type for cloned cluster (by default neptune-export will use the same instance type as the source cluster).")
@Once
@AllowedValues(allowedValues = {
"db.r4.large",
"db.r4.xlarge",
"db.r4.2xlarge",
"db.r4.4xlarge",
"db.r4.8xlarge",
"db.r5.large",
"db.r5.xlarge",
"db.r5.2xlarge",
"db.r5.4xlarge",
"db.r5.8xlarge",
"db.r5.12xlarge",
"db.r5.16xlarge",
"db.r5.24xlarge",
"db.r5d.large",
"db.r5d.xlarge",
"db.r5d.2xlarge",
"db.r5d.4xlarge",
"db.r5d.8xlarge",
"db.r5d.12xlarge",
"db.r5d.16xlarge",
"db.r5d.24xlarge",
"db.r6g.large",
"db.r6g.xlarge",
"db.r6g.2xlarge",
"db.r6g.4xlarge",
"db.r6g.8xlarge",
"db.r6g.12xlarge",
"db.r6g.16xlarge",
"db.x2g.large",
"db.x2g.xlarge",
"db.x2g.2xlarge",
"db.x2g.4xlarge",
"db.x2g.8xlarge",
"db.x2g.12xlarge",
"db.x2g.16xlarge",
"db.t3.medium",
"db.t4g.medium",
"r4.large",
"r4.xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r5.large",
"r5.xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5d.large",
"r5d.xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r6g.large",
"r6g.xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"x2g.large",
"x2g.xlarge",
"x2g.2xlarge",
"x2g.4xlarge",
"x2g.8xlarge",
"x2g.12xlarge",
"x2g.16xlarge",
"t3.medium",
"t4g.medium"})
private String cloneClusterInstanceType;
// Number of read replicas to create on the clone (0-15, default 0).
@Option(name = {"--clone-cluster-replica-count"}, description = "Number of read replicas to add to the cloned cluster (default, 0).")
@Once
@IntegerRange(min = 0, minInclusive = true, max = 15, maxInclusive = true)
private int replicaCount = 0;
// Hidden option: override the Neptune engine version on the clone (defaults to latest).
@Option(name = {"--clone-cluster-engine-version"}, description = "Cloned cluster Neptune engine version (default, latest).", hidden = true)
@Once
private String engineVersion;
// Optional correlation ID tag so the clone can later be located (e.g. by remove-clone).
@Option(name = {"--clone-cluster-correlation-id"}, description = "Correlation ID to be added to a correlation-id tag on the cloned cluster.")
@Once
private String cloneCorrelationId;
// Performs the clone, prints cluster details to the console, and echoes the new
// cluster ID on stdout; failures are reported on stderr without exiting.
@Override
public void run() {
try {
AddCloneTask addCloneTask = new AddCloneTask(sourceClusterId, targetClusterId, cloneClusterInstanceType, replicaCount, engineVersion, awsCli, cloneCorrelationId);
NeptuneClusterMetadata clusterMetadata = addCloneTask.execute();
GetClusterInfo.printClusterDetails(clusterMetadata);
System.out.println(clusterMetadata.clusterId());
} catch (Exception e) {
System.err.println("An error occurred while cloning an Amazon Neptune database cluster:");
e.printStackTrace();
}
}
}
| 4,164 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/NeptuneExportCommand.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.AwsCliModule;
import com.amazonaws.services.neptune.cli.FeatureToggleModule;
import com.amazonaws.services.neptune.cli.ProfilesModule;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.export.NeptuneExportEventHandler;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException;
import javax.inject.Inject;
/**
 * Base class for all export commands: wires up shared modules (AWS CLI, feature toggles,
 * profiles), relays export lifecycle events to a pluggable handler, and centralises
 * error reporting.
 */
public abstract class NeptuneExportCommand extends NeptuneExportBaseCommand implements NeptuneExportEventHandler, NeptuneExportEventHandlerHost {

    @Inject
    protected AwsCliModule awsCli = new AwsCliModule();

    @Inject
    private FeatureToggleModule featureToggleModule = new FeatureToggleModule();

    @Inject
    private ProfilesModule profilesModule = new ProfilesModule();

    // True when invoked from the command line, in which case failures call System.exit(-1).
    private boolean isCliInvocation = false;
    private int maxFileDescriptorCount;
    // Defaults to a no-op handler so event dispatch never needs a null check.
    private NeptuneExportEventHandler eventHandler = NeptuneExportEventHandler.NULL_EVENT_HANDLER;

    @Override
    public void setEventHandler(NeptuneExportEventHandler eventHandler) {
        this.eventHandler = eventHandler;
    }

    public void setIsCliInvocation(boolean isCliInvocation) {
        this.isCliInvocation = isCliInvocation;
    }

    /** Forwards export completion (with schema) to the registered event handler. */
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception {
        eventHandler.onExportComplete(directories, stats, cluster, graphSchema);
    }

    /** Forwards export completion (without schema) to the registered event handler. */
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception {
        eventHandler.onExportComplete(directories, stats, cluster);
    }

    public void onError() {
        eventHandler.onError();
    }

    /**
     * Reports a failure: connection problems get targeted troubleshooting hints, all other
     * errors additionally fire onError(). In CLI mode the process exits with status -1.
     */
    void handleException(Throwable e) {
        if (e.getCause() instanceof RemoteConnectionException) {
            e.printStackTrace();
            System.err.println("An error occurred while connecting to Neptune. " +
                    "Ensure you have not disabled SSL if the database requires SSL in transit. " +
                    "Ensure you have specified the --use-iam-auth flag (and set the SERVICE_REGION environment variable if running in your own environment) if the database uses IAM database authentication. " +
                    "Ensure the database's VPC security group(s) allow access from the export tool.");
        } else {
            e.printStackTrace();
            onError();
            System.err.println("An error occurred while exporting from Neptune: " + e.getMessage());
        }
        if (isCliInvocation) {
            System.exit(-1);
        }
    }

    FeatureToggles featureToggles() {
        return featureToggleModule.featureToggles();
    }

    public int getMaxFileDescriptorCount() {
        return maxFileDescriptorCount;
    }

    public void setMaxFileDescriptorCount(int maxFileDescriptorCount) {
        this.maxFileDescriptorCount = maxFileDescriptorCount;
    }
}
| 4,165 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/ExportRdfGraph.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.*;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.cluster.ConcurrencyConfig;
import com.amazonaws.services.neptune.cluster.EventId;
import com.amazonaws.services.neptune.cluster.GetLastEventIdStrategy;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.rdf.NeptuneSparqlClient;
import com.amazonaws.services.neptune.rdf.ExportRdfJob;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.Timer;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.help.Examples;
import javax.inject.Inject;
@Examples(examples = {
"bin/neptune-export.sh export-rdf -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output "},
descriptions = {
"Export all data to the /home/ec2-user/output directory"
})
@Command(name = "export-rdf", description = "Export RDF graph from Neptune.")
/**
 * Airline command: exports an RDF graph from Neptune via SPARQL. Runs single-threaded
 * (concurrency fixed at 1) and optionally records the Neptune Streams last-event ID
 * before the export begins.
 */
public class ExportRdfGraph extends NeptuneExportCommand implements Runnable {

    // Modules injected by the Airline CLI framework; each contributes its own option group.
    @Inject
    private CloneClusterModule cloneStrategy = new CloneClusterModule();

    @Inject
    private CommonConnectionModule connection = new CommonConnectionModule(awsCli);

    @Inject
    private RdfTargetModule target = new RdfTargetModule();

    @Inject
    private RdfExportScopeModule exportScope = new RdfExportScopeModule();

    @Inject
    private NeptuneStreamsModule streams = new NeptuneStreamsModule();

    @Override
    public void run() {
        try {
            Timer.timedActivity(String.format("exporting rdf %s", exportScope.scope()), (CheckedActivity.Runnable) () -> {
                // The clone (or direct connection) lives for the duration of the export.
                try (Cluster exportCluster = cloneStrategy.cloneCluster(
                        connection.clusterMetadata(),
                        connection.config(),
                        new ConcurrencyConfig(1),
                        featureToggles())) {

                    Directories outputDirectories = target.createDirectories();
                    JsonResource<EventId, Object> lastEventIdResource = outputDirectories.lastEventIdFileResource();

                    // Capture the stream position before exporting so downstream consumers
                    // can resume from a consistent point.
                    GetLastEventIdStrategy lastEventIdStrategy = streams.lastEventIdStrategy(exportCluster, lastEventIdResource);
                    lastEventIdStrategy.saveLastEventId("sparql");

                    try (NeptuneSparqlClient sparqlClient = NeptuneSparqlClient.create(exportCluster.connectionConfig(), featureToggles())) {
                        ExportRdfJob exportJob = exportScope.createJob(sparqlClient, target.config(outputDirectories));
                        exportJob.execute();
                    }

                    // Report output locations, then notify listeners of completion.
                    outputDirectories.writeRootDirectoryPathAsReturnValue(target);
                    lastEventIdStrategy.writeLastEventIdResourcePathAsMessage(target);
                    onExportComplete(outputDirectories, new ExportStats(), exportCluster);
                }
            });
        } catch (Exception e) {
            handleException(e);
        }
    }
}
| 4,166 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/RemoveClone.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.AwsCliModule;
import com.amazonaws.services.neptune.cluster.GetClusterIdFromCorrelationId;
import com.amazonaws.services.neptune.cluster.NeptuneClusterMetadata;
import com.amazonaws.services.neptune.cluster.RemoveCloneTask;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import com.github.rvesse.airline.annotations.restrictions.RequireOnlyOne;
import org.apache.commons.lang.StringUtils;
import javax.inject.Inject;
@Command(name = "remove-clone", description = "Remove a cloned Amazon Neptune database cluster.")
/**
 * Airline command: removes a cloned Amazon Neptune database cluster, identified either
 * directly by its cluster ID or indirectly via the correlation-id tag that add-clone set.
 */
public class RemoveClone implements Runnable {

    @Inject
    private AwsCliModule awsCli = new AwsCliModule();

    @Option(name = {"--clone-cluster-id"}, description = "Cluster ID of the cloned Amazon Neptune database cluster.")
    @RequireOnlyOne(tag = "cloneClusterIdOrCorrelationId")
    @Once
    private String cloneClusterId;

    @Option(name = {"--clone-cluster-correlation-id"}, description = "Value of the correlation-id tag on an Amazon Neptune cloned cluster that you want to remove.")
    @RequireOnlyOne(tag = "cloneClusterIdOrCorrelationId")
    @Once
    private String correlationId;

    @Override
    public void run() {
        // When only a correlation ID was supplied, translate it into a cluster ID first.
        if (StringUtils.isEmpty(cloneClusterId) && StringUtils.isNotEmpty(correlationId)) {
            resolveClusterIdFromCorrelationId();
        }
        try {
            new RemoveCloneTask(NeptuneClusterMetadata.createFromClusterId(cloneClusterId, awsCli)).execute();
        } catch (Exception e) {
            System.err.println("An error occurred while removing a cloned Amazon Neptune database cluster:");
            e.printStackTrace();
            System.exit(-1);
        }
    }

    // Looks up the cloned cluster's ID from its correlation-id tag; exits (status 0)
    // with a message on stderr when no matching cluster is found.
    private void resolveClusterIdFromCorrelationId() {
        cloneClusterId = new GetClusterIdFromCorrelationId(correlationId, awsCli).execute();
        if (StringUtils.isEmpty(cloneClusterId)) {
            System.err.println(String.format("Unable to get a cloned Amazon Neptune database cluster ID for correlation ID %s", correlationId));
            System.exit(0);
        }
    }
}
| 4,167 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/ExportPropertyGraph.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.cli.*;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.cluster.EventId;
import com.amazonaws.services.neptune.cluster.GetLastEventIdStrategy;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient;
import com.amazonaws.services.neptune.propertygraph.io.ExportPropertyGraphJob;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphTargetConfig;
import com.amazonaws.services.neptune.propertygraph.schema.ExportSpecification;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.Timer;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.help.Examples;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import javax.inject.Inject;
import java.util.Collection;
@Examples(examples = {
"bin/neptune-export.sh export-pg -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output",
"bin/neptune-export.sh export-pg -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output --format json",
"bin/neptune-export.sh export-pg -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -s nodes",
"bin/neptune-export.sh export-pg -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -nl User -el FOLLOWS",
"bin/neptune-export.sh export-pg -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -cn 2",
"bin/neptune-export.sh export-pg -e neptunedbcluster-xxxxxxxxxxxx.cluster-yyyyyyyyyyyy.us-east-1.neptune.amazonaws.com -d /home/ec2-user/output -cn 2 -r 1000"
}, descriptions = {
"Export all data to the /home/ec2-user/output directory",
"Export all data to the /home/ec2-user/output directory as JSON",
"Export only nodes to the /home/ec2-user/output directory",
"Export only User nodes and FOLLOWS relationships",
"Parallel export using 2 threads",
"Parallel export using 2 threads, with each thread processing batches of 1000 nodes or edges"
})
@Command(name = "export-pg", description = "Export property graph from Neptune to CSV or JSON.")
// Airline command implementation for "export-pg": exports the property graph
// (nodes and edges) from a Neptune cluster to CSV or JSON files.
public class ExportPropertyGraph extends NeptuneExportCommand implements Runnable {

    // Each injected module contributes a group of related CLI options
    // (connection, scope, target, concurrency, serialization, etc.).
    @Inject
    private CloneClusterModule cloneStrategy = new CloneClusterModule();
    @Inject
    private CommonConnectionModule connection = new CommonConnectionModule(awsCli);
    @Inject
    private PropertyGraphScopeModule scope = new PropertyGraphScopeModule();
    @Inject
    private PropertyGraphTargetModule target = new PropertyGraphTargetModule();
    @Inject
    private PropertyGraphConcurrencyModule concurrency = new PropertyGraphConcurrencyModule();
    @Inject
    private PropertyGraphSerializationModule serialization = new PropertyGraphSerializationModule();
    @Inject
    private PropertyGraphRangeModule range = new PropertyGraphRangeModule();
    @Inject
    private GraphSchemaProviderModule graphSchemaProvider = new GraphSchemaProviderModule(false);
    @Inject
    private PrinterOptionsModule printerOptions = new PrinterOptionsModule();
    @Inject
    private GremlinFiltersModule gremlinFilters = new GremlinFiltersModule();
    @Inject
    private NeptuneStreamsModule streams = new NeptuneStreamsModule();

    @Override
    public void run() {
        try {
            Timer.timedActivity("exporting property graph", (CheckedActivity.Runnable) () -> {
                // The cluster may be a clone of the source (depending on the
                // clone-cluster options); closing it releases/removes the clone.
                try (Cluster cluster = cloneStrategy.cloneCluster(
                        connection.clusterMetadata(),
                        connection.config(),
                        concurrency.config(),
                        featureToggles())) {

                    Directories directories = target.createDirectories();
                    JsonResource<GraphSchema, Boolean> configFileResource = directories.configFileResource();
                    JsonResource<EventId, Object> eventIdFileResource = directories.lastEventIdFileResource();
                    JsonResource<ExportStats, GraphSchema> statsFileResource = directories.statsFileResource();

                    // Capture the Neptune Streams position before exporting so
                    // downstream consumers can sync from that point.
                    GetLastEventIdStrategy getLastEventIdStrategy = streams.lastEventIdStrategy(cluster, eventIdFileResource);
                    getLastEventIdStrategy.saveLastEventId("gremlin");

                    GraphSchema graphSchema = graphSchemaProvider.graphSchema();
                    ExportStats stats = new ExportStats();

                    PropertyGraphTargetConfig targetConfig = target.config(directories, printerOptions.config());

                    Collection<ExportSpecification> exportSpecifications = scope.exportSpecifications(
                            graphSchema,
                            gremlinFilters.filters(),
                            stats,
                            featureToggles());

                    // Both the Gremlin client and the traversal source are closed
                    // by this try-with-resources once the export job completes.
                    try (NeptuneGremlinClient client = NeptuneGremlinClient.create(cluster, serialization.config());
                         GraphTraversalSource g = client.newTraversalSource()) {

                        ExportPropertyGraphJob exportJob = new ExportPropertyGraphJob(
                                exportSpecifications,
                                graphSchema,
                                g,
                                range.config(),
                                gremlinFilters.filters(),
                                cluster.concurrencyConfig(),
                                targetConfig,
                                featureToggles(),
                                getMaxFileDescriptorCount()
                        );

                        // The job returns the (possibly updated) schema inferred
                        // while exporting; that version is what gets persisted.
                        graphSchema = Timer.timedActivity(
                                "export",
                                (CheckedActivity.Callable<GraphSchema>) exportJob::execute);

                        configFileResource.save(graphSchema, false);
                        statsFileResource.save(stats, graphSchema);
                    }

                    // Report output locations and summary stats (printed to stderr).
                    directories.writeRootDirectoryPathAsMessage(target.description(), target);
                    configFileResource.writeResourcePathAsMessage(target);
                    getLastEventIdStrategy.writeLastEventIdResourcePathAsMessage(target);

                    System.err.println();
                    System.err.println(stats.formatStats(graphSchema));

                    directories.writeRootDirectoryPathAsReturnValue(target);
                    onExportComplete(directories, stats, cluster, graphSchema);
                }
            });
        } catch (Exception e) {
            handleException(e);
        }
    }
}
| 4,168 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/SimulatedCloneCluster.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
/**
 * A {@link CloneClusterStrategy} that pretends to clone a cluster: the
 * caller's connection and concurrency settings are handed back unchanged,
 * and closing the resulting {@link Cluster} deletes nothing.
 */
public class SimulatedCloneCluster implements CloneClusterStrategy {

    private final NeptuneClusterMetadata clusterMetadata;

    public SimulatedCloneCluster(NeptuneClusterMetadata clusterMetadata) {
        this.clusterMetadata = clusterMetadata;
    }

    @Override
    public Cluster cloneCluster(ConnectionConfig connectionConfig, ConcurrencyConfig concurrencyConfig) throws Exception {
        System.err.println("Simulating creating cloned cluster (original cluster will be used)...");
        return new SimulatedCluster(connectionConfig, concurrencyConfig, clusterMetadata);
    }

    /** Wraps the original cluster's settings; close() only prints a message. */
    private static final class SimulatedCluster implements Cluster {

        private final ConnectionConfig connectionConfig;
        private final ConcurrencyConfig concurrencyConfig;
        private final NeptuneClusterMetadata clusterMetadata;

        private SimulatedCluster(ConnectionConfig connectionConfig,
                                 ConcurrencyConfig concurrencyConfig,
                                 NeptuneClusterMetadata clusterMetadata) {
            this.connectionConfig = connectionConfig;
            this.concurrencyConfig = concurrencyConfig;
            this.clusterMetadata = clusterMetadata;
        }

        @Override
        public ConnectionConfig connectionConfig() {
            return connectionConfig;
        }

        @Override
        public ConcurrencyConfig concurrencyConfig() {
            return concurrencyConfig;
        }

        @Override
        public NeptuneClusterMetadata clusterMetadata() {
            return clusterMetadata;
        }

        @Override
        public void close() throws Exception {
            System.err.println("Simulating deleting cloned cluster (original cluster will not be deleted)...");
        }
    }
}
| 4,169 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/SimpleResponseHandler.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.http.HttpResponseHandler;

import java.io.InputStream;
import java.util.List;
import java.util.Scanner;
/**
 * Converts a raw AWS SDK HTTP response into this package's simple
 * {@link HttpResponse} value object, throwing an {@link AmazonServiceException}
 * for any non-2xx status.
 *
 * Fixes over the previous version: the {@link Scanner} (and thus the response
 * stream) is closed via try-with-resources, the body is decoded as UTF-8
 * instead of the platform default charset, and a missing content-type header
 * no longer throws.
 */
class SimpleResponseHandler implements HttpResponseHandler<HttpResponse> {

    @Override
    public HttpResponse handle(com.amazonaws.http.HttpResponse response) {
        int status = response.getStatusCode();

        String content = null;
        InputStream stream = response.getContent();
        if (stream != null) {
            // The \A delimiter makes the Scanner consume the whole stream as
            // one token; closing the Scanner also closes the stream (safe,
            // since needsConnectionLeftOpen() is false).
            try (Scanner s = new Scanner(stream, "UTF-8").useDelimiter("\\A")) {
                content = s.hasNext() ? s.next() : "";
            }
        }

        if (status < 200 || status >= 300) {
            AmazonServiceException ase = new AmazonServiceException(content);
            ase.setStatusCode(status);
            throw ase;
        }

        // The content-type header may be absent; fall back to null rather
        // than failing on get(0).
        List<String> contentTypeValues = response.getHeaderValues("content-type");
        String contentType = (contentTypeValues == null || contentTypeValues.isEmpty())
                ? null
                : contentTypeValues.get(0);

        return new HttpResponse(status, content, contentType);
    }

    @Override
    public boolean needsConnectionLeftOpen() {
        return false;
    }
}
| 4,170 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/ProxyConfig.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
/**
 * Immutable value object describing an optional proxy that sits in front of
 * the Neptune endpoint.
 */
public class ProxyConfig {

    private final String endpoint;
    private final int port;
    private final boolean removeHostHeader;

    public ProxyConfig(String endpoint, int port, boolean removeHostHeader) {
        this.endpoint = endpoint;
        this.port = port;
        this.removeHostHeader = removeHostHeader;
    }

    /** Proxy hostname or address. */
    public String endpoint() {
        return endpoint;
    }

    /** Proxy port. */
    public int port() {
        return port;
    }

    /** Whether the Host header should be removed from proxied requests. */
    public boolean removeHostHeader() {
        return removeHostHeader;
    }

    @Override
    public String toString() {
        return String.format("ProxyConfig{endpoint='%s', port=%d, removeHostHeader=%b}",
                endpoint, port, removeHostHeader);
    }
}
| 4,171 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/CloneCluster.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.AmazonNeptune;
import java.util.UUID;
import java.util.function.Supplier;
/**
 * A {@link CloneClusterStrategy} that creates a copy-on-write clone of the
 * source Neptune cluster (via {@link AddCloneTask}), exports against the
 * clone, and deletes the clone (via {@link RemoveCloneTask}) when the
 * returned {@link Cluster} is closed.
 */
public class CloneCluster implements CloneClusterStrategy {

    private final NeptuneClusterMetadata originalClusterMetadata;
    // Instance type for the clone's instances; empty means "same as source"
    // (resolved inside AddCloneTask).
    private final String cloneClusterInstanceType;
    private final int replicaCount;
    // Upper bound on export concurrency; values <= 0 mean "no explicit limit".
    private final int maxConcurrency;
    private final String engineVersion;
    // Correlation id tagged onto the clone so it can be tracked externally.
    private final String cloneCorrelationId;

    public CloneCluster(NeptuneClusterMetadata originalClusterMetadata,
                        String cloneClusterInstanceType,
                        int replicaCount,
                        int maxConcurrency,
                        String engineVersion,
                        String cloneCorrelationId) {
        this.originalClusterMetadata = originalClusterMetadata;
        this.cloneClusterInstanceType = cloneClusterInstanceType;
        this.replicaCount = replicaCount;
        this.maxConcurrency = maxConcurrency;
        this.engineVersion = engineVersion;
        this.cloneCorrelationId = cloneCorrelationId;
    }

    @Override
    public Cluster cloneCluster(ConnectionConfig connectionConfig, ConcurrencyConfig concurrencyConfig) throws Exception {

        // Cloning requires connecting directly to the new cluster's endpoints,
        // which is incompatible with access via a load balancer.
        if (!connectionConfig.isDirectConnection()) {
            throw new IllegalStateException("neptune-export does not support cloning a Neptune cluster accessed via a load balancer");
        }

        String clusterId = originalClusterMetadata.clusterId();
        // Random 5-char suffix keeps the generated cluster id unique.
        String targetClusterId = String.format("neptune-export-cluster-%s", UUID.randomUUID().toString().substring(0, 5));

        AddCloneTask addCloneTask = new AddCloneTask(
                clusterId,
                targetClusterId,
                cloneClusterInstanceType,
                replicaCount,
                engineVersion,
                originalClusterMetadata.clientSupplier(),
                cloneCorrelationId);

        NeptuneClusterMetadata targetClusterMetadata = addCloneTask.execute();

        InstanceType instanceType = InstanceType.parse(
                targetClusterMetadata.instanceMetadataFor(targetClusterMetadata.primary()).instanceType());

        // One primary plus replicaCount replicas, each contributing the
        // per-instance concurrency for its instance type.
        int targetConcurrency = instanceType.concurrency() * (1 + replicaCount);
        // Respect the user-supplied cap if one was given.
        int newConcurrency = maxConcurrency > 0 ?
                Math.min(maxConcurrency, targetConcurrency) :
                targetConcurrency;

        System.err.println();
        System.err.println(String.format("Endpoints       : %s", String.join(", ", targetClusterMetadata.endpoints())));
        System.err.println(String.format("Max concurrency : %s", maxConcurrency));
        System.err.println(String.format("Concurrency     : %s", newConcurrency));

        // Connection details point at the clone's endpoints, but reuse the
        // caller's port, proxy settings and credentials.
        return new ClonedCluster(
                new ConnectionConfig(
                        targetClusterId,
                        targetClusterMetadata.endpoints(),
                        connectionConfig.port(),
                        targetClusterMetadata.isIAMDatabaseAuthenticationEnabled(),
                        true,
                        connectionConfig.proxyConfig(),
                        connectionConfig.getCredentialsProvider()
                ),
                new ConcurrencyConfig(newConcurrency),
                targetClusterMetadata
        );
    }

    /**
     * Cluster handle for the clone; closing it triggers deletion of the
     * cloned cluster via {@link RemoveCloneTask}.
     */
    private static class ClonedCluster implements Cluster {

        private final ConnectionConfig connectionConfig;
        private final ConcurrencyConfig concurrencyConfig;
        private final NeptuneClusterMetadata clusterMetadata;

        private ClonedCluster(ConnectionConfig connectionConfig,
                              ConcurrencyConfig concurrencyConfig,
                              NeptuneClusterMetadata clusterMetadata) {
            this.connectionConfig = connectionConfig;
            this.concurrencyConfig = concurrencyConfig;
            this.clusterMetadata = clusterMetadata;
        }

        @Override
        public ConnectionConfig connectionConfig() {
            return connectionConfig;
        }

        @Override
        public ConcurrencyConfig concurrencyConfig() {
            return concurrencyConfig;
        }

        @Override
        public NeptuneClusterMetadata clusterMetadata() {
            return clusterMetadata;
        }

        @Override
        public void close() throws Exception {
            RemoveCloneTask removeCloneTask = new RemoveCloneTask(clusterMetadata);
            removeCloneTask.execute();
        }
    }
}
| 4,172 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/DoNotGetLastEventIdTask.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.io.CommandWriter;
import java.io.IOException;
/**
 * Null-object implementation of {@link GetLastEventIdStrategy}, used when
 * capturing the Neptune Streams last event id is not required: both
 * operations are deliberate no-ops.
 */
public class DoNotGetLastEventIdTask implements GetLastEventIdStrategy {

    @Override
    public void saveLastEventId(String streamEndpointType) throws IOException {
        // Do nothing
    }

    @Override
    public void writeLastEventIdResourcePathAsMessage(CommandWriter writer) {
        // Do nothing
    }
}
| 4,173 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/GetLastEventId.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.regions.DefaultAwsRegionProviderChain;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.function.ToIntFunction;
import java.util.stream.Collectors;
/**
 * Queries a cluster's Neptune Streams endpoint to discover the id of the most
 * recent stream event. Returns {@code null} when streams are disabled or the
 * last event id cannot be determined.
 */
public class GetLastEventId {

    /**
     * Returns the largest commitNum value (as a string) accepted by the
     * streams endpoint of the given engine version: engines older than
     * 1.0.4.2 only accept an int-sized commitNum, later engines accept a long.
     *
     * Fixed: previously this threw IndexOutOfBoundsException for versions
     * with fewer than four numeric segments (e.g. "1.0.4") and
     * NumberFormatException for versions carrying a non-numeric suffix
     * (e.g. "1.0.4.1.R2"). Missing numeric segments are now treated as zero
     * and parsing stops at the first non-numeric segment.
     */
    public static final String MaxCommitNumValueForEngine(String engineVersion) {
        List<Integer> parts = new ArrayList<>();
        for (String part : engineVersion.split("\\.")) {
            try {
                parts.add(Integer.valueOf(part));
            } catch (NumberFormatException e) {
                break; // stop at non-numeric segments such as patch suffixes
            }
        }
        int minor = parts.size() > 1 ? parts.get(1) : 0;
        if (minor == 0) {
            int patch = parts.size() > 2 ? parts.get(2) : 0;
            int build = parts.size() > 3 ? parts.get(3) : 0;
            if (patch < 4 || (patch == 4 && build < 2)) {
                // Engine < 1.0.4.2: commitNum is limited to int range.
                return String.valueOf(Integer.MAX_VALUE);
            }
        }
        return String.valueOf(Long.MAX_VALUE);
    }

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GetLastEventId.class);

    private final NeptuneClusterMetadata clusterMetadata;
    private final ConnectionConfig connectionConfig;
    // Stream endpoint type, e.g. "gremlin" or "sparql".
    private final String streamEndpointType;

    public GetLastEventId(NeptuneClusterMetadata clusterMetadata, ConnectionConfig connectionConfig, String streamEndpointType) {
        this.clusterMetadata = clusterMetadata;
        this.connectionConfig = connectionConfig;
        this.streamEndpointType = streamEndpointType;
    }

    /**
     * Fetches the last event id by deliberately querying beyond the end of
     * the stream: the service responds with StreamRecordsNotFoundException
     * whose message contains the last valid event id, which is then parsed.
     * Any other failure is logged and {@code null} is returned.
     */
    public EventId execute() {
        if (!clusterMetadata.isStreamEnabled()) {
            return null;
        }

        String endpoint = connectionConfig.endpoints().iterator().next();
        String streamsEndpoint = String.format("https://%s:%s/%s/stream", endpoint, connectionConfig.port(), streamEndpointType);

        logger.info("Streams endpoint: {}", streamsEndpoint);

        try {
            String region = new DefaultAwsRegionProviderChain().getRegion();
            NeptuneHttpsClient neptuneHttpsClient = new NeptuneHttpsClient(streamsEndpoint, region, endpoint.equals("localhost"));

            // Ask for the record at the maximum commitNum the engine accepts;
            // a populated stream answers with StreamRecordsNotFoundException.
            Map<String, String> params = new HashMap<>();
            params.put("commitNum", MaxCommitNumValueForEngine(clusterMetadata.engineVersion()));
            params.put("limit", "1");

            HttpResponse httpResponse = neptuneHttpsClient.get(params);
            logger.info(httpResponse.getContent());

            // A successful response carries no last event id to extract.
            return null;
        } catch (AmazonServiceException e) {
            if (e.getErrorCode().equals("StreamRecordsNotFoundException")) {
                // Expected path: the error message embeds the last event id.
                EventId lastEventId = StreamRecordsNotFoundExceptionParser.parseLastEventId(e.getErrorMessage());
                logger.info("LastEventId: {}", lastEventId);
                return lastEventId;
            } else {
                logger.error("Error while accessing Neptune Streams endpoint", e);
                return null;
            }
        } catch (Exception e) {
            logger.error("Error while accessing Neptune Streams endpoint", e);
            return null;
        }
    }
}
| 4,174 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/GetLastEventIdStrategy.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.io.CommandWriter;
import java.io.IOException;
/**
 * Strategy for capturing and reporting the Neptune Streams last event id at
 * export time (no-op implementations exist for when streams are not used).
 */
public interface GetLastEventIdStrategy {
    /** Fetches and persists the last event id for the given stream endpoint type (e.g. "gremlin"). */
    void saveLastEventId(String streamEndpointType) throws IOException;
    /** Writes the location of the persisted last event id via the supplied writer. */
    void writeLastEventIdResourcePathAsMessage(CommandWriter writer);
}
| 4,175 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/SimpleErrorResponseHandler.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.http.HttpResponse;
import com.amazonaws.http.HttpResponseHandler;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.InputStream;
import java.util.Scanner;
public class SimpleErrorResponseHandler implements HttpResponseHandler<AmazonServiceException> {
private static final ObjectMapper MAPPER = new ObjectMapper();
@Override
public AmazonServiceException handle(HttpResponse response) throws Exception {
String content = null;
InputStream stream = response.getContent();
if (stream != null) {
Scanner s = new Scanner(stream).useDelimiter("\\A");
content = s.hasNext() ? s.next() : "";
}
AmazonServiceException.ErrorType errorType = AmazonServiceException.ErrorType.Unknown;
if (response.getStatusCode() >= 500){
errorType = AmazonServiceException.ErrorType.Service;
} else if (response.getStatusCode() >= 400){
errorType = AmazonServiceException.ErrorType.Client;
}
String errorCode = "UnknownError";
String message = response.getStatusText();
String requestId = "";
if (content != null){
JsonNode json = MAPPER.readTree(content);
if (json.has("requestId")){
requestId = json.path("requestId").textValue();
}
if (json.has("code")){
errorCode = json.path("code").textValue();
}
if (json.has("detailedMessage")){
message = json.path("detailedMessage").textValue();
}
}
AmazonServiceException exception = new AmazonServiceException(message);
exception.setStatusCode(response.getStatusCode());
exception.setRequestId(requestId);
exception.setErrorType(errorType);
exception.setErrorCode(errorCode);
return exception;
}
@Override
public boolean needsConnectionLeftOpen() {
return false;
}
}
| 4,176 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/RemoveCloneTask.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.AmazonNeptune;
import com.amazonaws.services.neptune.model.*;
import com.amazonaws.services.neptune.util.Activity;
import com.amazonaws.services.neptune.util.Timer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
/**
 * Deletes a cloned Neptune cluster created by neptune-export: all instances
 * (primary and replicas) in parallel, then the cluster itself, then the DB
 * cluster and DB instance parameter groups.
 *
 * Improvements over the previous version: the two copy-pasted 10-second
 * polling loops are extracted into {@link #waitWhile(Supplier)}, and an
 * interrupt during polling now restores the interrupt flag and aborts —
 * consistent with how awaitTermination interruption is already handled here —
 * instead of being swallowed with printStackTrace().
 */
public class RemoveCloneTask {

    // Interval between describe calls while waiting for a deletion to finish.
    private static final long POLL_INTERVAL_MILLIS = 10000;

    private final NeptuneClusterMetadata clusterMetadata;

    public RemoveCloneTask(NeptuneClusterMetadata clusterMetadata) {
        this.clusterMetadata = clusterMetadata;
    }

    public void execute() {
        AmazonNeptune neptune = clusterMetadata.clientSupplier().get();
        try {
            Timer.timedActivity("deleting cloned cluster", false,
                    (Activity.Runnable) () -> deleteCluster(neptune));
        } finally {
            // Always release the SDK client, even if deletion fails.
            if (neptune != null) {
                neptune.shutdown();
            }
        }
    }

    private void deleteCluster(AmazonNeptune neptuneClient) {

        System.err.println();
        System.err.println("Deleting cloned cluster " + clusterMetadata.clusterId() + "...");

        // Safety check: only delete clusters explicitly tagged as having been
        // created by neptune-export.
        if (!clusterMetadata.isTaggedWithNeptuneExport()) {
            throw new IllegalStateException("Cluster must have an 'application' tag with the value '" +
                    NeptuneClusterMetadata.NEPTUNE_EXPORT_APPLICATION_TAG + "' before it can be deleted");
        }

        // Delete the primary and all replicas in parallel.
        ExecutorService taskExecutor = Executors.newFixedThreadPool(1 + clusterMetadata.replicas().size());

        taskExecutor.execute(() -> deleteInstance(neptuneClient, clusterMetadata.primary()));
        for (String replicaId : clusterMetadata.replicas()) {
            taskExecutor.execute(() -> deleteInstance(neptuneClient, replicaId));
        }

        taskExecutor.shutdown();
        try {
            taskExecutor.awaitTermination(30, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }

        System.err.println("Deleting cluster...");

        neptuneClient.deleteDBCluster(new DeleteDBClusterRequest()
                .withDBClusterIdentifier(clusterMetadata.clusterId())
                .withSkipFinalSnapshot(true));

        try {
            // Poll until the cluster no longer appears in describe results.
            waitWhile(() -> neptuneClient.describeDBClusters(
                    new DescribeDBClustersRequest().withDBClusterIdentifier(clusterMetadata.clusterId()))
                    .getDBClusters()
                    .size() > 0);
        } catch (DBClusterNotFoundException e) {
            // Cluster is already gone — nothing left to wait for
        }

        System.err.println("Deleting parameter groups...");

        neptuneClient.deleteDBClusterParameterGroup(new DeleteDBClusterParameterGroupRequest()
                .withDBClusterParameterGroupName(clusterMetadata.dbClusterParameterGroupName()));
        neptuneClient.deleteDBParameterGroup(new DeleteDBParameterGroupRequest()
                .withDBParameterGroupName(
                        clusterMetadata.instanceMetadataFor(clusterMetadata.primary()).dbParameterGroupName()));
    }

    private void deleteInstance(AmazonNeptune neptune, String instanceId) {
        System.err.println("Deleting instance " + instanceId + "...");
        neptune.deleteDBInstance(new DeleteDBInstanceRequest()
                .withDBInstanceIdentifier(instanceId)
                .withSkipFinalSnapshot(true));
        try {
            // Poll until the instance no longer appears in describe results.
            waitWhile(() -> neptune.describeDBInstances(
                    new DescribeDBInstancesRequest().withDBInstanceIdentifier(instanceId))
                    .getDBInstances()
                    .size() > 0);
        } catch (DBInstanceNotFoundException e) {
            // Instance is already gone — nothing left to wait for
        }
    }

    /**
     * Re-evaluates the condition, sleeping POLL_INTERVAL_MILLIS between
     * checks, until it is false. Restores the interrupt flag and aborts if
     * the waiting thread is interrupted.
     */
    private static void waitWhile(Supplier<Boolean> condition) {
        while (condition.get()) {
            try {
                Thread.sleep(POLL_INTERVAL_MILLIS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
        }
    }
}
| 4,177 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/AddCloneTask.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.AmazonNeptune;
import com.amazonaws.services.neptune.model.*;
import com.amazonaws.services.neptune.util.Activity;
import com.amazonaws.services.neptune.util.EnvironmentVariableUtils;
import com.amazonaws.services.neptune.util.Timer;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
public class AddCloneTask {
private final String sourceClusterId;
private final String targetClusterId;
private final String cloneClusterInstanceType;
private final int replicaCount;
private final String engineVersion;
private final Supplier<AmazonNeptune> amazonNeptuneClientSupplier;
private final String cloneCorrelationId;
public AddCloneTask(String sourceClusterId,
String targetClusterId,
String cloneClusterInstanceType,
int replicaCount,
String engineVersion,
Supplier<AmazonNeptune> amazonNeptuneClientSupplier,
String cloneCorrelationId) {
this.sourceClusterId = sourceClusterId;
this.targetClusterId = targetClusterId;
this.cloneClusterInstanceType = cloneClusterInstanceType;
this.replicaCount = replicaCount;
this.engineVersion = engineVersion;
this.amazonNeptuneClientSupplier = amazonNeptuneClientSupplier;
this.cloneCorrelationId = cloneCorrelationId;
}
public NeptuneClusterMetadata execute() {
return Timer.timedActivity(
"cloning cluster",
(Activity.Callable<NeptuneClusterMetadata>) this::cloneCluster);
}
private NeptuneClusterMetadata cloneCluster() {
System.err.println("Cloning cluster " + sourceClusterId + "...");
System.err.println();
NeptuneClusterMetadata sourceClusterMetadata =
NeptuneClusterMetadata.createFromClusterId(sourceClusterId, amazonNeptuneClientSupplier);
InstanceType instanceType = StringUtils.isEmpty(cloneClusterInstanceType) ?
InstanceType.parse(sourceClusterMetadata.instanceMetadataFor(sourceClusterMetadata.primary()).instanceType()) :
InstanceType.parse(cloneClusterInstanceType);
System.err.println(String.format("Source clusterId : %s", sourceClusterId));
System.err.println(String.format("Target clusterId : %s", targetClusterId));
System.err.println(String.format("Target instance type : %s", instanceType));
AmazonNeptune neptune = amazonNeptuneClientSupplier.get();
DBClusterParameterGroup dbClusterParameterGroup = Timer.timedActivity(
"creating DB cluster parameter group",
(Activity.Callable<DBClusterParameterGroup>) () ->
createDbClusterParameterGroup(sourceClusterMetadata, neptune));
DBParameterGroup dbParameterGroup = Timer.timedActivity(
"creating parameter groups",
(Activity.Callable<DBParameterGroup>) () -> createDbParameterGroup(sourceClusterMetadata, neptune));
DBCluster targetDbCluster = Timer.timedActivity(
"creating target cluster",
(Activity.Callable<DBCluster>) () ->
createCluster(sourceClusterMetadata, neptune, dbClusterParameterGroup));
Timer.timedActivity("creating primary", (Activity.Runnable) () ->
createInstance("primary",
neptune,
sourceClusterMetadata,
instanceType,
dbParameterGroup,
targetDbCluster));
if (replicaCount > 0) {
Timer.timedActivity("creating replicas", (Activity.Runnable) () ->
createReplicas(sourceClusterMetadata, instanceType, neptune, dbParameterGroup, targetDbCluster));
}
neptune.shutdown();
return NeptuneClusterMetadata.createFromClusterId(targetClusterId, amazonNeptuneClientSupplier);
}
private void createReplicas(NeptuneClusterMetadata sourceClusterMetadata,
InstanceType instanceType,
AmazonNeptune neptune,
DBParameterGroup dbParameterGroup,
DBCluster targetDbCluster) {
ExecutorService taskExecutor = Executors.newFixedThreadPool(replicaCount);
for (int i = 0; i < replicaCount; i++) {
taskExecutor.execute(() -> createInstance("replica",
neptune,
sourceClusterMetadata,
instanceType,
dbParameterGroup,
targetDbCluster));
}
taskExecutor.shutdown();
try {
taskExecutor.awaitTermination(30, TimeUnit.MINUTES);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
private DBCluster createCluster(NeptuneClusterMetadata sourceClusterMetadata,
AmazonNeptune neptune,
DBClusterParameterGroup
dbClusterParameterGroup) {
System.err.println("Creating target cluster...");
RestoreDBClusterToPointInTimeRequest cloneClusterRequest = new RestoreDBClusterToPointInTimeRequest()
.withSourceDBClusterIdentifier(sourceClusterId)
.withDBClusterIdentifier(targetClusterId)
.withRestoreType("copy-on-write")
.withUseLatestRestorableTime(true)
.withPort(sourceClusterMetadata.port())
.withDBClusterParameterGroupName(dbClusterParameterGroup.getDBClusterParameterGroupName())
.withEnableIAMDatabaseAuthentication(sourceClusterMetadata.isIAMDatabaseAuthenticationEnabled())
.withDBSubnetGroupName(sourceClusterMetadata.dbSubnetGroupName())
.withVpcSecurityGroupIds(sourceClusterMetadata.vpcSecurityGroupIds())
.withTags(getTags(sourceClusterMetadata.clusterId()));
DBCluster targetDbCluster = neptune.restoreDBClusterToPointInTime(cloneClusterRequest);
String clusterStatus = targetDbCluster.getStatus();
while (clusterStatus.equals("creating")) {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
e.printStackTrace();
}
clusterStatus = neptune.describeDBClusters(
new DescribeDBClustersRequest()
.withDBClusterIdentifier(targetDbCluster.getDBClusterIdentifier()))
.getDBClusters()
.get(0)
.getStatus();
}
return targetDbCluster;
}
private Collection<Tag> getTags(String sourceClusterId) {
Collection<Tag> tags = new ArrayList<>();
tags.add(new Tag()
.withKey("source")
.withValue(sourceClusterId));
tags.add(new Tag()
.withKey("application")
.withValue(NeptuneClusterMetadata.NEPTUNE_EXPORT_APPLICATION_TAG));
if (StringUtils.isNotEmpty(cloneCorrelationId)) {
tags.add(new Tag()
.withKey(NeptuneClusterMetadata.NEPTUNE_EXPORT_CORRELATION_ID_KEY)
.withValue(cloneCorrelationId));
}
return tags;
}
private DBParameterGroup createDbParameterGroup(NeptuneClusterMetadata sourceClusterMetadata,
AmazonNeptune neptune) {
DBParameterGroup dbParameterGroup;
dbParameterGroup = neptune.createDBParameterGroup(
new CreateDBParameterGroupRequest()
.withDBParameterGroupName(String.format("%s-db-params", targetClusterId))
.withDescription(String.format("%s DB Parameter Group", targetClusterId))
.withDBParameterGroupFamily(sourceClusterMetadata.dbParameterGroupFamily())
.withTags(getTags(sourceClusterMetadata.clusterId())));
neptune.modifyDBParameterGroup(new ModifyDBParameterGroupRequest()
.withDBParameterGroupName(dbParameterGroup.getDBParameterGroupName())
.withParameters(
new Parameter()
.withParameterName("neptune_query_timeout")
.withParameterValue("2147483647")
.withApplyMethod(ApplyMethod.PendingReboot)));
List<Parameter> dbParameters = neptune.describeDBParameters(
new DescribeDBParametersRequest()
.withDBParameterGroupName(dbParameterGroup.getDBParameterGroupName()))
.getParameters();
while (dbParameters.stream().noneMatch(parameter ->
parameter.getParameterName().equals("neptune_query_timeout") &&
parameter.getParameterValue().equals("2147483647"))) {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
e.printStackTrace();
}
dbParameters = neptune.describeDBClusterParameters(
new DescribeDBClusterParametersRequest()
.withDBClusterParameterGroupName(dbParameterGroup.getDBParameterGroupName()))
.getParameters();
}
System.err.println(String.format("DB parameter group : %s", dbParameterGroup.getDBParameterGroupName()));
System.err.println();
return dbParameterGroup;
}
/**
 * Creates the DB cluster parameter group for the target cluster, mirrors the
 * source cluster's neptune_streams setting, raises neptune_query_timeout to
 * 2147483647 ms (Integer.MAX_VALUE, effectively no timeout), and blocks until
 * the new timeout value is visible via the describe API.
 *
 * @param sourceClusterMetadata metadata of the cluster being cloned (supplies the
 *                              parameter group family, streams setting and tag source)
 * @param neptune               management API client (owned by the caller)
 * @return the newly created DB cluster parameter group
 */
private DBClusterParameterGroup createDbClusterParameterGroup(NeptuneClusterMetadata sourceClusterMetadata,
                                                              AmazonNeptune neptune) {
    DBClusterParameterGroup dbClusterParameterGroup;
    dbClusterParameterGroup = neptune.createDBClusterParameterGroup(
            new CreateDBClusterParameterGroupRequest()
                    .withDBClusterParameterGroupName(String.format("%s-db-cluster-params", targetClusterId))
                    .withDescription(String.format("%s DB Cluster Parameter Group", targetClusterId))
                    .withDBParameterGroupFamily(sourceClusterMetadata.dbParameterGroupFamily())
                    .withTags(getTags(sourceClusterMetadata.clusterId())));
    // Carry the source cluster's streams setting over to the clone.
    String neptuneStreamsParameterValue = sourceClusterMetadata.isStreamEnabled() ? "1" : "0";
    try {
        // First attempt also sets neptune_enforce_ssl.
        neptune.modifyDBClusterParameterGroup(new ModifyDBClusterParameterGroupRequest()
                .withDBClusterParameterGroupName(dbClusterParameterGroup.getDBClusterParameterGroupName())
                .withParameters(
                        new Parameter()
                                .withParameterName("neptune_enforce_ssl")
                                .withParameterValue("1")
                                .withApplyMethod(ApplyMethod.PendingReboot),
                        new Parameter()
                                .withParameterName("neptune_query_timeout")
                                .withParameterValue("2147483647")
                                .withApplyMethod(ApplyMethod.PendingReboot),
                        new Parameter()
                                .withParameterName("neptune_streams")
                                .withParameterValue(neptuneStreamsParameterValue)
                                .withApplyMethod(ApplyMethod.PendingReboot)));
    } catch (AmazonNeptuneException e) {
        // Fallback: retry without neptune_enforce_ssl. Presumably some engine
        // versions reject that parameter at the cluster level — TODO confirm.
        neptune.modifyDBClusterParameterGroup(new ModifyDBClusterParameterGroupRequest()
                .withDBClusterParameterGroupName(dbClusterParameterGroup.getDBClusterParameterGroupName())
                .withParameters(
                        new Parameter()
                                .withParameterName("neptune_query_timeout")
                                .withParameterValue("2147483647")
                                .withApplyMethod(ApplyMethod.PendingReboot),
                        new Parameter()
                                .withParameterName("neptune_streams")
                                .withParameterValue(neptuneStreamsParameterValue)
                                .withApplyMethod(ApplyMethod.PendingReboot)));
    }
    List<Parameter> dbClusterParameters = neptune.describeDBClusterParameters(
            new DescribeDBClusterParametersRequest()
                    .withDBClusterParameterGroupName(dbClusterParameterGroup.getDBClusterParameterGroupName()))
            .getParameters();
    // Poll until the modified timeout value is reflected by the API.
    while (dbClusterParameters.stream().noneMatch(parameter ->
            parameter.getParameterName().equals("neptune_query_timeout") &&
                    parameter.getParameterValue().equals("2147483647"))) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // NOTE(review): interrupt is swallowed and polling continues; consider
            // restoring the interrupt status (Thread.currentThread().interrupt()).
            e.printStackTrace();
        }
        dbClusterParameters = neptune.describeDBClusterParameters(
                new DescribeDBClusterParametersRequest()
                        .withDBClusterParameterGroupName(dbClusterParameterGroup.getDBClusterParameterGroupName()))
                .getParameters();
    }
    System.err.println(String.format("DB cluster parameter group : %s", dbClusterParameterGroup.getDBClusterParameterGroupName()));
    return dbClusterParameterGroup;
}
/**
 * Creates a single instance in the target cluster and blocks until it leaves the
 * "creating" state, polling every 10 seconds.
 *
 * @param name                  role label used in the instance identifier and log output
 * @param neptune               management API client (owned by the caller)
 * @param sourceClusterMetadata source cluster metadata (tag source)
 * @param instanceType          instance class for the new instance
 * @param dbParameterGroup      DB parameter group to attach
 * @param targetDbCluster       cluster the instance joins
 */
private void createInstance(String name,
                            AmazonNeptune neptune,
                            NeptuneClusterMetadata sourceClusterMetadata,
                            InstanceType instanceType,
                            DBParameterGroup dbParameterGroup,
                            DBCluster targetDbCluster) {
    System.err.println("Creating target " + name + " instance...");
    // Short random suffix keeps identifiers unique across repeated runs.
    String instanceIdentifier =
            String.format("neptune-export-%s-%s", name, UUID.randomUUID().toString().substring(0, 5));
    CreateDBInstanceRequest createRequest = new CreateDBInstanceRequest()
            .withDBInstanceClass(instanceType.value())
            .withDBInstanceIdentifier(instanceIdentifier)
            .withDBClusterIdentifier(targetDbCluster.getDBClusterIdentifier())
            .withDBParameterGroupName(dbParameterGroup.getDBParameterGroupName())
            .withEngine("neptune")
            .withTags(getTags(sourceClusterMetadata.clusterId()));
    if (StringUtils.isNotEmpty(engineVersion)) {
        createRequest = createRequest.withEngineVersion(engineVersion);
    }
    DBInstance createdInstance = neptune.createDBInstance(createRequest);
    // Wait for the instance to finish provisioning before returning.
    String status = createdInstance.getDBInstanceStatus();
    while (status.equals("creating")) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        status = neptune.describeDBInstances(new DescribeDBInstancesRequest()
                .withDBInstanceIdentifier(createdInstance.getDBInstanceIdentifier()))
                .getDBInstances()
                .get(0)
                .getDBInstanceStatus();
    }
}
}
| 4,178 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/CloneClusterStrategy.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
/**
 * Strategy that supplies the {@link Cluster} an export should run against —
 * either the original source cluster or a temporary clone of it.
 */
public interface CloneClusterStrategy {

    /**
     * Returns the cluster to export from, configured with the given connection
     * and concurrency settings.
     *
     * @param connectionConfig  how to connect to the (possibly cloned) cluster
     * @param concurrencyConfig concurrency settings for the export
     * @throws Exception if the cluster (or its clone) cannot be prepared
     */
    Cluster cloneCluster(ConnectionConfig connectionConfig,
                         ConcurrencyConfig concurrencyConfig) throws Exception;
}
| 4,179 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/Cluster.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.AmazonNeptune;
import java.util.function.Supplier;
/**
 * A Neptune cluster that an export runs against. {@code AutoCloseable} so that
 * implementations which own a temporary clone can tear it down on close; other
 * implementations treat close as a no-op.
 */
public interface Cluster extends AutoCloseable {

    /** Connection details (endpoints, port, auth) for this cluster. */
    ConnectionConfig connectionConfig();

    /** Concurrency settings to use when exporting from this cluster. */
    ConcurrencyConfig concurrencyConfig();

    /** Management-API metadata describing this cluster. */
    NeptuneClusterMetadata clusterMetadata();
}
| 4,180 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/GetLastEventIdTask.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.io.CommandWriter;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Fetches the most recent Neptune Streams event ID for a cluster and persists it
 * to a JSON resource, remembering whether anything was saved so the resource path
 * is only advertised when an event ID actually exists.
 */
public class GetLastEventIdTask implements GetLastEventIdStrategy {

    private final Cluster cluster;
    private final JsonResource<EventId, Object> lastEventIdResource;
    // Holds the most recently saved event ID; null until saveLastEventId succeeds.
    private final AtomicReference<EventId> lastEventId = new AtomicReference<>();

    public GetLastEventIdTask(Cluster cluster, JsonResource<EventId, Object> lastEventIdResource) {
        this.cluster = cluster;
        this.lastEventIdResource = lastEventIdResource;
    }

    @Override
    public void saveLastEventId(String streamEndpointType) throws IOException {
        // Query the cluster's stream endpoint for its latest event ID.
        GetLastEventId query = new GetLastEventId(
                cluster.clusterMetadata(),
                cluster.connectionConfig(),
                streamEndpointType);
        EventId latest = query.execute();
        if (latest == null) {
            // Nothing on the stream — leave the resource untouched.
            return;
        }
        lastEventId.set(latest);
        lastEventIdResource.save(latest, null);
    }

    @Override
    public void writeLastEventIdResourcePathAsMessage(CommandWriter writer) {
        // Only advertise the resource path if an event ID was actually saved.
        if (lastEventId.get() == null) {
            return;
        }
        lastEventIdResource.writeResourcePathAsMessage(writer);
    }
}
| 4,181 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/GetClusterIdFromCorrelationId.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.AmazonNeptune;
import com.amazonaws.services.neptune.model.*;
import com.amazonaws.services.neptune.util.Activity;
import com.amazonaws.services.neptune.util.Timer;
import org.apache.commons.lang.StringUtils;
import java.util.List;
import java.util.function.Supplier;
/**
 * Looks up the ID of the Neptune cluster whose correlation-id tag matches a given
 * correlation ID by scanning all clusters in the account/region and inspecting
 * their tags. Returns null if no cluster matches.
 */
public class GetClusterIdFromCorrelationId {

    private final String correlationId;
    private final Supplier<AmazonNeptune> amazonNeptuneClientSupplier;

    public GetClusterIdFromCorrelationId(String correlationId, Supplier<AmazonNeptune> amazonNeptuneClientSupplier) {
        this.correlationId = correlationId;
        this.amazonNeptuneClientSupplier = amazonNeptuneClientSupplier;
    }

    /**
     * Runs the lookup inside a timed activity, always shutting the client down.
     *
     * @return the matching cluster ID, or null if none was found
     */
    public String execute() {
        AmazonNeptune client = amazonNeptuneClientSupplier.get();
        try {
            return Timer.timedActivity("getting cluster ID from correlation ID", false,
                    (Activity.Callable<String>) () -> getClusterId(client));
        } finally {
            if (client != null) {
                client.shutdown();
            }
        }
    }

    // Scans every cluster, comparing its correlation-id tag with the target value.
    private String getClusterId(AmazonNeptune neptune) {
        DescribeDBClustersResult clusters = neptune.describeDBClusters(new DescribeDBClustersRequest());
        for (DBCluster candidate : clusters.getDBClusters()) {
            String candidateCorrelationId = getCorrelationId(candidate.getDBClusterArn(), neptune);
            if (StringUtils.isNotEmpty(candidateCorrelationId) && candidateCorrelationId.equals(correlationId)) {
                String clusterId = candidate.getDBClusterIdentifier();
                System.err.println(String.format("Found cluster ID %s for correlation ID %s", clusterId, correlationId));
                return clusterId;
            }
            // Pause between clusters — presumably to throttle the per-cluster tag
            // lookups against the management API; TODO confirm.
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Do nothing
            }
        }
        System.err.println(String.format("Unable to find cluster ID for correlation ID %s", correlationId));
        return null;
    }

    // Returns the value of the correlation-id tag on the given cluster ARN, or null.
    private String getCorrelationId(String dbClusterArn, AmazonNeptune neptune) {
        return neptune.listTagsForResource(
                        new ListTagsForResourceRequest()
                                .withResourceName(dbClusterArn))
                .getTagList()
                .stream()
                .filter(tag -> tag.getKey().equalsIgnoreCase(NeptuneClusterMetadata.NEPTUNE_EXPORT_CORRELATION_ID_KEY))
                .map(Tag::getValue)
                .findFirst()
                .orElse(null);
    }
}
| 4,182 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/DoNotCloneCluster.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
/**
 * {@link CloneClusterStrategy} that performs no cloning: the export runs directly
 * against the source cluster described by the supplied metadata.
 */
public class DoNotCloneCluster implements CloneClusterStrategy {

    private final NeptuneClusterMetadata clusterMetadata;

    public DoNotCloneCluster(NeptuneClusterMetadata clusterMetadata) {
        this.clusterMetadata = clusterMetadata;
    }

    @Override
    public Cluster cloneCluster(ConnectionConfig connectionConfig, ConcurrencyConfig concurrencyConfig) throws Exception {
        return new PassthroughCluster(connectionConfig, concurrencyConfig, clusterMetadata);
    }

    /** Read-through view of the original cluster; closing it is a no-op. */
    private static final class PassthroughCluster implements Cluster {

        private final ConnectionConfig connectionConfig;
        private final ConcurrencyConfig concurrencyConfig;
        private final NeptuneClusterMetadata clusterMetadata;

        private PassthroughCluster(ConnectionConfig connectionConfig,
                                   ConcurrencyConfig concurrencyConfig,
                                   NeptuneClusterMetadata clusterMetadata) {
            this.connectionConfig = connectionConfig;
            this.concurrencyConfig = concurrencyConfig;
            this.clusterMetadata = clusterMetadata;
        }

        @Override
        public ConnectionConfig connectionConfig() {
            return connectionConfig;
        }

        @Override
        public ConcurrencyConfig concurrencyConfig() {
            return concurrencyConfig;
        }

        @Override
        public NeptuneClusterMetadata clusterMetadata() {
            return clusterMetadata;
        }

        @Override
        public void close() throws Exception {
            // Nothing to release: this strategy does not own the source cluster.
        }
    }
}
| 4,183 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/NeptuneHttpsClient.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.*;
import com.amazonaws.auth.AWS4Signer;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.http.AmazonHttpClient;
import com.amazonaws.http.ExecutionContext;
import com.amazonaws.http.HttpMethodName;
import java.net.URI;
import java.util.Map;
/**
 * Minimal HTTPS client for calling a Neptune database endpoint with SigV4-signed
 * GET requests (service name "neptune-db").
 */
public class NeptuneHttpsClient {

    // Credentials come from the standard provider chain (env vars, system
    // properties, profiles, instance metadata, ...).
    private final AWSCredentialsProvider awsCredentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();
    private final AWS4Signer signer;
    private final String uri;
    private final boolean disableCertCheck;

    public NeptuneHttpsClient(String uri, String region, boolean disableCertCheck) {
        this.uri = uri;
        this.disableCertCheck = disableCertCheck;
        signer = new AWS4Signer();
        signer.setRegionName(region);
        signer.setServiceName("neptune-db");
    }

    /**
     * Issues a SigV4-signed GET request to the configured URI with the given
     * query-string parameters and returns the raw HTTP response.
     */
    public HttpResponse get(Map<String, String> queryStringParams) {
        Request<Void> request = new DefaultRequest<>(signer.getServiceName());
        request.setEndpoint(URI.create(uri));
        request.setHttpMethod(HttpMethodName.GET);
        for (Map.Entry<String, String> entry : queryStringParams.entrySet()) {
            request.addParameter(entry.getKey(), entry.getValue());
        }
        // Sign only after all parameters are added: SigV4 covers the query string.
        signer.sign(request, awsCredentialsProvider.getCredentials());
        if (disableCertCheck){
            // NOTE(review): this sets a JVM-wide system property, disabling cert
            // checking for every SDK client in the process, not just this request.
            System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, "true");
        }
        // A fresh AmazonHttpClient is built per request; ExecutionContext(false)
        // disables SDK metrics collection for the call.
        Response<HttpResponse> response = new AmazonHttpClient(new ClientConfiguration())
                .requestExecutionBuilder()
                .executionContext(new ExecutionContext(false))
                .request(request)
                .errorResponseHandler(new SimpleErrorResponseHandler())
                .execute(new SimpleResponseHandler());
        return response.getAwsResponse();
    }
}
| 4,184 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/NeptuneClusterMetadata.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.AmazonNeptune;
import com.amazonaws.services.neptune.export.EndpointValidator;
import com.amazonaws.services.neptune.model.*;
import org.apache.commons.lang.StringUtils;
import java.util.*;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/**
 * Immutable snapshot of a Neptune cluster's configuration — engine version,
 * parameter groups, networking, member instances and tags — built from the AWS
 * Neptune management API via {@link #createFromClusterId(String, Supplier)} or
 * {@link #createFromEndpoints(Collection, Supplier)}.
 */
public class NeptuneClusterMetadata {

    public static final String NEPTUNE_EXPORT_APPLICATION_TAG = "neptune-export";
    public static final String NEPTUNE_EXPORT_CORRELATION_ID_KEY = "correlation-id";

    /**
     * Derives a cluster ID from an endpoint by taking the leading DNS label
     * (everything before the first '.').
     *
     * @throws IllegalArgumentException if the endpoint contains no '.'
     */
    public static String clusterIdFromEndpoint(String endpoint) {
        int index = endpoint.indexOf(".");
        if (index < 0) {
            throw new IllegalArgumentException(String.format("Unable to identify cluster ID from endpoint '%s'. Use the clusterId export parameter instead.", endpoint));
        }
        return endpoint.substring(0, index);
    }

    /**
     * Finds the cluster that owns any of the supplied endpoints and returns its
     * metadata. Cluster-level endpoints (writer and reader) are checked first,
     * then individual instance endpoints; both scans page through all results.
     *
     * @throws IllegalStateException if no cluster or instance matches
     */
    public static NeptuneClusterMetadata createFromEndpoints(Collection<String> endpoints, Supplier<AmazonNeptune> amazonNeptuneClientSupplier) {
        // NOTE(review): this client is never shut down on any path out of this
        // method (createFromClusterId creates and shuts down its own client).
        AmazonNeptune neptune = amazonNeptuneClientSupplier.get();
        String paginationToken = null;
        do {
            DescribeDBClustersResult describeDBClustersResult = neptune
                    .describeDBClusters(new DescribeDBClustersRequest()
                            .withMarker(paginationToken)
                            .withFilters(new Filter().withName("engine").withValues("neptune")));
            paginationToken = describeDBClustersResult.getMarker();
            for (DBCluster dbCluster : describeDBClustersResult.getDBClusters()) {
                for (String endpoint : endpoints) {
                    String endpointValue = getEndpointValue(endpoint);
                    if (endpointValue.equals(getEndpointValue(dbCluster.getEndpoint()))){
                        return createFromClusterId(dbCluster.getDBClusterIdentifier(), amazonNeptuneClientSupplier);
                    } else if (endpointValue.equals(getEndpointValue(dbCluster.getReaderEndpoint()))){
                        return createFromClusterId(dbCluster.getDBClusterIdentifier(), amazonNeptuneClientSupplier);
                    }
                }
            }
        } while (paginationToken != null);
        // No cluster-level endpoint matched: fall back to instance endpoints.
        paginationToken = null;
        do {
            DescribeDBInstancesResult describeDBInstancesResult = neptune.describeDBInstances(
                    new DescribeDBInstancesRequest()
                            .withMarker(paginationToken)
                            .withFilters(new Filter().withName("engine").withValues("neptune")));
            paginationToken = describeDBInstancesResult.getMarker();
            for (DBInstance dbInstance : describeDBInstancesResult.getDBInstances()) {
                for (String endpoint : endpoints) {
                    String endpointValue = getEndpointValue(endpoint);
                    if (endpointValue.equals(getEndpointValue(dbInstance.getEndpoint().getAddress()))){
                        return createFromClusterId(dbInstance.getDBClusterIdentifier(), amazonNeptuneClientSupplier);
                    }
                }
            }
        } while (paginationToken != null);
        throw new IllegalStateException(String.format("Unable to identify cluster ID from endpoints: %s", endpoints));
    }

    // Normalizes an endpoint for comparison (validation + lower-casing).
    private static String getEndpointValue(String endpoint) {
        return EndpointValidator.validate(endpoint).toLowerCase();
    }

    /**
     * Builds metadata for the given cluster ID: describes the cluster, its tags,
     * its cluster parameter group (to derive the parameter group family and the
     * neptune_streams setting), and all member instances.
     *
     * @throws IllegalArgumentException if the cluster cannot be found
     */
    public static NeptuneClusterMetadata createFromClusterId(String clusterId, Supplier<AmazonNeptune> amazonNeptuneClientSupplier) {
        AmazonNeptune neptune = amazonNeptuneClientSupplier.get();
        DescribeDBClustersResult describeDBClustersResult = neptune
                .describeDBClusters(new DescribeDBClustersRequest().withDBClusterIdentifier(clusterId));
        if (describeDBClustersResult.getDBClusters().isEmpty()) {
            throw new IllegalArgumentException(String.format("Unable to find cluster %s", clusterId));
        }
        DBCluster dbCluster = describeDBClustersResult.getDBClusters().get(0);
        List<Tag> tags = neptune.listTagsForResource(
                new ListTagsForResourceRequest()
                        .withResourceName(dbCluster.getDBClusterArn())).getTagList();
        Map<String, String> clusterTags = new HashMap<>();
        tags.forEach(t -> clusterTags.put(t.getKey(), t.getValue()));
        boolean isIAMDatabaseAuthenticationEnabled = dbCluster.isIAMDatabaseAuthenticationEnabled();
        Integer port = dbCluster.getPort();
        String dbClusterParameterGroup = dbCluster.getDBClusterParameterGroup();
        String engineVersion = dbCluster.getEngineVersion();
        String dbParameterGroupFamily;
        try {
            DescribeDBClusterParameterGroupsResult describeDBClusterParameterGroupsResult = neptune.describeDBClusterParameterGroups(
                    new DescribeDBClusterParameterGroupsRequest()
                            .withDBClusterParameterGroupName(dbClusterParameterGroup));
            Optional<DBClusterParameterGroup> parameterGroup = describeDBClusterParameterGroupsResult
                    .getDBClusterParameterGroups().stream().findFirst();
            // Fall back to "neptune1" if the group cannot be resolved.
            dbParameterGroupFamily = parameterGroup.isPresent() ?
                    parameterGroup.get().getDBParameterGroupFamily() :
                    "neptune1";
        } catch (AmazonNeptuneException e) {
            // Older deployments of Neptune Export service may not have requisite permissions to
            // describe cluster parameter group, so we'll try and guess the group family.
            if (StringUtils.isNotEmpty(engineVersion) && engineVersion.contains(".")) {
                // e.g. "1.2.0.0" -> minor version 2 -> family "neptune1.2".
                int v = Integer.parseInt(engineVersion.split("\\.")[1]);
                dbParameterGroupFamily = v > 1 ? "neptune1.2" : "neptune1";
            } else {
                dbParameterGroupFamily = "neptune1";
            }
        }
        // NOTE(review): this describe call is not paginated; assumes neptune_streams
        // appears in the first page of parameters — confirm.
        DescribeDBClusterParametersResult describeDBClusterParametersResult = neptune.describeDBClusterParameters(
                new DescribeDBClusterParametersRequest()
                        .withDBClusterParameterGroupName(dbClusterParameterGroup));
        Optional<Parameter> neptuneStreamsParameter = describeDBClusterParametersResult.getParameters().stream()
                .filter(parameter -> parameter.getParameterName().equals("neptune_streams"))
                .findFirst();
        boolean isStreamEnabled = neptuneStreamsParameter.isPresent() &&
                neptuneStreamsParameter.get().getParameterValue().equals("1");
        String dbSubnetGroup = dbCluster.getDBSubnetGroup();
        List<VpcSecurityGroupMembership> vpcSecurityGroups = dbCluster.getVpcSecurityGroups();
        List<String> vpcSecurityGroupIds = vpcSecurityGroups.stream()
                .map(VpcSecurityGroupMembership::getVpcSecurityGroupId)
                .collect(Collectors.toList());
        List<DBClusterMember> dbClusterMembers = dbCluster.getDBClusterMembers();
        Optional<DBClusterMember> clusterWriter = dbClusterMembers.stream()
                .filter(DBClusterMember::isClusterWriter)
                .findFirst();
        // Primary is the writer member; empty string if the cluster has no writer.
        String primary = clusterWriter.map(DBClusterMember::getDBInstanceIdentifier).orElse("");
        List<String> replicas = dbClusterMembers.stream()
                .filter(dbClusterMember -> !dbClusterMember.isClusterWriter())
                .map(DBClusterMember::getDBInstanceIdentifier)
                .collect(Collectors.toList());
        DescribeDBInstancesRequest describeDBInstancesRequest = new DescribeDBInstancesRequest()
                .withFilters(Collections.singletonList(
                        new Filter()
                                .withName("db-cluster-id")
                                .withValues(dbCluster.getDBClusterIdentifier())));
        DescribeDBInstancesResult describeDBInstancesResult = neptune
                .describeDBInstances(describeDBInstancesRequest);
        // Per-instance metadata keyed by instance identifier.
        Map<String, NeptuneInstanceMetadata> instanceTypes = new HashMap<>();
        describeDBInstancesResult.getDBInstances()
                .forEach(c -> instanceTypes.put(
                        c.getDBInstanceIdentifier(),
                        new NeptuneInstanceMetadata(
                                c.getDBInstanceClass(),
                                c.getDBParameterGroups().get(0).getDBParameterGroupName(),
                                c.getEndpoint())
                ));
        neptune.shutdown();
        return new NeptuneClusterMetadata(clusterId,
                port,
                engineVersion,
                dbClusterParameterGroup,
                dbParameterGroupFamily,
                isIAMDatabaseAuthenticationEnabled,
                isStreamEnabled,
                dbSubnetGroup,
                vpcSecurityGroupIds,
                primary,
                replicas,
                instanceTypes,
                clusterTags,
                amazonNeptuneClientSupplier);
    }

    private final String clusterId;
    private final int port;
    private final String engineVersion;
    private final String dbClusterParameterGroupName;
    private final String dbParameterGroupFamily;
    private final Boolean isIAMDatabaseAuthenticationEnabled;
    private final Boolean isStreamEnabled;
    private final String dbSubnetGroupName;
    private final Collection<String> vpcSecurityGroupIds;
    // Instance ID of the writer member ("" if none).
    private final String primary;
    private final Collection<String> replicas;
    // Per-instance metadata keyed by instance ID.
    private final Map<String, NeptuneInstanceMetadata> instanceMetadata;
    private final Map<String, String> clusterTags;
    private final Supplier<AmazonNeptune> amazonNeptuneClientSupplier;

    private NeptuneClusterMetadata(String clusterId,
                                   int port,
                                   String engineVersion,
                                   String dbClusterParameterGroupName,
                                   String dbParameterGroupFamily,
                                   Boolean isIAMDatabaseAuthenticationEnabled,
                                   Boolean isStreamEnabled,
                                   String dbSubnetGroupName,
                                   List<String> vpcSecurityGroupIds,
                                   String primary,
                                   Collection<String> replicas,
                                   Map<String, NeptuneInstanceMetadata> instanceMetadata,
                                   Map<String, String> clusterTags,
                                   Supplier<AmazonNeptune> amazonNeptuneClientSupplier) {
        this.clusterId = clusterId;
        this.port = port;
        this.engineVersion = engineVersion;
        this.dbClusterParameterGroupName = dbClusterParameterGroupName;
        this.dbParameterGroupFamily = dbParameterGroupFamily;
        this.isIAMDatabaseAuthenticationEnabled = isIAMDatabaseAuthenticationEnabled;
        this.isStreamEnabled = isStreamEnabled;
        this.dbSubnetGroupName = dbSubnetGroupName;
        this.vpcSecurityGroupIds = vpcSecurityGroupIds;
        this.primary = primary;
        this.replicas = replicas;
        this.instanceMetadata = instanceMetadata;
        this.clusterTags = clusterTags;
        this.amazonNeptuneClientSupplier = amazonNeptuneClientSupplier;
    }

    public String clusterId() {
        return clusterId;
    }

    public int port() {
        return port;
    }

    public String engineVersion() {
        return engineVersion;
    }

    public String dbClusterParameterGroupName() {
        return dbClusterParameterGroupName;
    }

    public String dbParameterGroupFamily() {
        return dbParameterGroupFamily;
    }

    public Boolean isIAMDatabaseAuthenticationEnabled() {
        return isIAMDatabaseAuthenticationEnabled;
    }

    public Boolean isStreamEnabled() {
        return isStreamEnabled;
    }

    public String dbSubnetGroupName() {
        return dbSubnetGroupName;
    }

    public Collection<String> vpcSecurityGroupIds() {
        return vpcSecurityGroupIds;
    }

    /** Instance ID of the writer member, or "" if the cluster has no writer. */
    public String primary() {
        return primary;
    }

    public Collection<String> replicas() {
        return replicas;
    }

    /** Metadata for a member instance, or null if the instance ID is unknown. */
    public NeptuneInstanceMetadata instanceMetadataFor(String key) {
        return instanceMetadata.get(key);
    }

    /** Endpoint addresses of all member instances. */
    public List<String> endpoints() {
        return instanceMetadata.values().stream().map(i -> i.endpoint().getAddress()).collect(Collectors.toList());
    }

    /** True if the cluster carries the application=neptune-export tag. */
    public boolean isTaggedWithNeptuneExport() {
        return clusterTags.containsKey("application") &&
                clusterTags.get("application").equalsIgnoreCase(NEPTUNE_EXPORT_APPLICATION_TAG);
    }

    public Supplier<AmazonNeptune> clientSupplier() {
        return amazonNeptuneClientSupplier;
    }

    /** Prints a human-readable summary of the cluster and its members to stderr. */
    public void printDetails(){
        System.err.println("Cluster ID : " + clusterId());
        System.err.println("Port : " + port());
        System.err.println("Engine : " + engineVersion());
        System.err.println("IAM DB Auth : " + isIAMDatabaseAuthenticationEnabled());
        System.err.println("Streams enabled : " + isStreamEnabled());
        System.err.println("Parameter group family : " + dbParameterGroupFamily());
        System.err.println("Cluster parameter group : " + dbClusterParameterGroupName());
        System.err.println("Subnet group : " + dbSubnetGroupName());
        System.err.println("Security group IDs : " + String.join(", ", vpcSecurityGroupIds()));
        System.err.println("Instance endpoints : " + String.join(", ", endpoints()));
        NeptuneClusterMetadata.NeptuneInstanceMetadata primary = instanceMetadataFor(primary());
        System.err.println();
        System.err.println("Primary");
        System.err.println(" Instance ID : " + primary());
        System.err.println(" Instance type : " + primary.instanceType());
        System.err.println(" Endpoint : " + primary.endpoint().getAddress());
        System.err.println(" Database parameter group : " + primary.dbParameterGroupName());
        if (!replicas().isEmpty()) {
            for (String replicaId : replicas()) {
                NeptuneClusterMetadata.NeptuneInstanceMetadata replica = instanceMetadataFor(replicaId);
                System.err.println();
                System.err.println("Replica");
                System.err.println(" Instance ID : " + replicaId);
                System.err.println(" Instance type : " + replica.instanceType());
                System.err.println(" Endpoint : " + replica.endpoint().getAddress());
                System.err.println(" Database parameter group : " + replica.dbParameterGroupName());
            }
        }
    }

    /** Per-instance details: instance class, DB parameter group name and endpoint. */
    public static class NeptuneInstanceMetadata {
        private final String instanceType;
        private final String dbParameterGroupName;
        private final Endpoint endpoint;

        public NeptuneInstanceMetadata(String instanceType, String dbParameterGroupName, Endpoint endpoint) {
            this.instanceType = instanceType;
            this.dbParameterGroupName = dbParameterGroupName;
            this.endpoint = endpoint;
        }

        public String instanceType() {
            return instanceType;
        }

        public String dbParameterGroupName() {
            return dbParameterGroupName;
        }

        public Endpoint endpoint() {
            return endpoint;
        }
    }
}
| 4,185 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/StreamRecordsNotFoundExceptionParser.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Extracts the last event ID (commit number and operation number) embedded in the
 * message of a Neptune StreamRecordsNotFoundException.
 */
public class StreamRecordsNotFoundExceptionParser {

    // Matches each run of digits in the exception message; the first two runs are
    // the commit number and op number. Compiled once: Pattern.compile() is
    // comparatively expensive and the pattern never changes.
    private static final Pattern DIGITS = Pattern.compile("\\d+");

    /**
     * Value object pairing a commit number with an operation number.
     * NOTE(review): not referenced by {@link #parseLastEventId(String)}, which
     * returns {@code EventId}; retained because it is part of the public API.
     */
    public static class LastEventId {
        private final long commitNum;
        private final long opNum;

        public LastEventId(long commitNum, long opNum) {
            this.commitNum = commitNum;
            this.opNum = opNum;
        }

        public long commitNum() {
            return commitNum;
        }

        public long opNum() {
            return opNum;
        }

        @Override
        public String toString() {
            return "{ " +
                    "\"commitNum\": " + commitNum +
                    ", \"opNum\": " + opNum +
                    " }";
        }
    }

    /**
     * Parses the first two numbers found in {@code errorMessage} as the commit
     * number and operation number respectively. Either value defaults to -1 when
     * the message contains fewer than one (or two) numbers.
     */
    public static EventId parseLastEventId(String errorMessage){
        long commitNum = -1;
        long opNum = -1;
        Matcher matcher = DIGITS.matcher(errorMessage);
        if (matcher.find()){
            commitNum = Long.parseLong(matcher.group());
        }
        if (matcher.find()){
            opNum = Long.parseLong(matcher.group());
        }
        return new EventId(commitNum, opNum);
    }
}
| 4,186 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/ConnectionConfig.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.neptune.auth.HandshakeRequestConfig;
import java.util.Collection;
import java.util.Collections;
/**
 * Connection details for a Neptune cluster, optionally fronted by a proxy. When a
 * proxy is configured, the proxy endpoint/port are used for the physical
 * connection while the real Neptune endpoints travel in the handshake request
 * config; without a proxy, the Neptune endpoints are used directly.
 */
public class ConnectionConfig {

    private final String clusterId;
    private final Collection<String> neptuneEndpoints;
    private final int neptunePort;
    private final boolean useIamAuth;
    // Made final: the field was never reassigned after construction, so it now
    // matches the immutability of the other fields.
    private final boolean useSsl;
    private final ProxyConfig proxyConfig;
    private final AWSCredentialsProvider credentialsProvider;

    public ConnectionConfig(String clusterId,
                            Collection<String> neptuneEndpoints,
                            int neptunePort,
                            boolean useIamAuth, boolean useSsl, ProxyConfig proxyConfig) {
        this(clusterId, neptuneEndpoints, neptunePort, useIamAuth, useSsl, proxyConfig, new DefaultAWSCredentialsProviderChain());
    }

    public ConnectionConfig(String clusterId,
                            Collection<String> neptuneEndpoints,
                            int neptunePort,
                            boolean useIamAuth, boolean useSsl, ProxyConfig proxyConfig,
                            AWSCredentialsProvider credentialsProvider) {
        this.clusterId = clusterId;
        this.neptuneEndpoints = neptuneEndpoints;
        this.neptunePort = neptunePort;
        this.useIamAuth = useIamAuth;
        this.useSsl = useSsl;
        this.proxyConfig = proxyConfig;
        this.credentialsProvider = credentialsProvider;
    }

    /** Endpoints to physically connect to: Neptune's own, or the proxy's. */
    public Collection<String> endpoints() {
        if (isDirectConnection()) {
            return neptuneEndpoints;
        } else {
            return Collections.singletonList(proxyConfig.endpoint());
        }
    }

    /** Port to physically connect to: Neptune's own, or the proxy's. */
    public int port() {
        if (isDirectConnection()) {
            return neptunePort;
        } else {
            return proxyConfig.port();
        }
    }

    public boolean useIamAuth() {
        return useIamAuth;
    }

    public boolean useSsl() {
        return useSsl;
    }

    /**
     * Handshake config carrying the real Neptune endpoints when connecting via a
     * proxy; empty (endpoint-less) when connecting directly.
     */
    public HandshakeRequestConfig handshakeRequestConfig() {
        if (isDirectConnection()) {
            return new HandshakeRequestConfig(Collections.emptyList(), neptunePort, false);
        } else {
            return new HandshakeRequestConfig(neptuneEndpoints, neptunePort, proxyConfig.removeHostHeader());
        }
    }

    /** True when no proxy is configured. */
    public boolean isDirectConnection() {
        return proxyConfig == null;
    }

    public ProxyConfig proxyConfig() {
        return proxyConfig;
    }

    public AWSCredentialsProvider getCredentialsProvider() {
        return credentialsProvider;
    }
}
| 4,187 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/ConcurrencyConfig.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.propertygraph.RangeConfig;
import org.apache.tinkerpop.gremlin.driver.Cluster;
import static java.lang.Math.max;
/**
 * Holds the export's level of concurrency and applies it to Gremlin driver
 * connection-pool sizing.
 */
public class ConcurrencyConfig {

    private final int concurrency;

    /**
     * @param concurrency Number of concurrent workers; must be one or more.
     * @throws IllegalArgumentException if {@code concurrency} is less than 1.
     */
    public ConcurrencyConfig(int concurrency) {
        if (concurrency < 1) {
            throw new IllegalArgumentException("Concurrency must be >= 1");
        }
        this.concurrency = concurrency;
    }

    public int concurrency() {
        return concurrency;
    }

    /**
     * Parallel execution is unbounded when more than one worker is configured but
     * no range size has been supplied (a range size of -1 means "no limit").
     */
    public boolean isUnboundedParallelExecution(RangeConfig rangeConfig) {
        return concurrency > 1 && rangeConfig.rangeSize() == -1;
    }

    /**
     * Sizes the driver's connection pools in proportion to concurrency, spreading
     * connections across the available endpoints. Single-threaded execution leaves
     * the builder untouched.
     */
    public Cluster.Builder applyTo(Cluster.Builder clusterBuilder, int numberOfEndpoints) {
        if (concurrency == 1) {
            return clusterBuilder;
        }
        // NOTE(review): assumes numberOfEndpoints >= 1; zero would divide by zero here.
        int poolSize = (concurrency / numberOfEndpoints) + 1;
        return clusterBuilder
                .minConnectionPoolSize(max(poolSize, 2))
                .maxConnectionPoolSize(max(poolSize, 8));
    }
}
| 4,188 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/EventId.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.amazonaws.services.neptune.propertygraph.io.Jsonizable;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * Identifies a position in the Neptune streams change log as a
 * (commitNum, opNum) pair.
 */
public class EventId implements Jsonizable<Object> {

    private final long commitNum;
    private final long opNum;

    public EventId(long commitNum, long opNum) {
        this.commitNum = commitNum;
        this.opNum = opNum;
    }

    public long commitNum() {
        return commitNum;
    }

    public long opNum() {
        return opNum;
    }

    /** Renders the id as a small JSON object literal. */
    @Override
    public String toString() {
        return String.format("{ \"commitNum\": %d, \"opNum\": %d }", commitNum, opNum);
    }

    @Override
    public JsonNode toJson(Object o) {
        // The parameter is required by the Jsonizable contract but is not used here.
        ObjectNode node = JsonNodeFactory.instance.objectNode();
        node.put("commitNum", commitNum);
        node.put("opNum", opNum);
        return node;
    }
}
| 4,189 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/HttpResponse.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Wraps an HTTP response and offers typed access to a JSON body.
 * All JSON accessors require the response to have been served as
 * {@code application/json}; otherwise they throw {@link IllegalStateException}.
 */
public class HttpResponse {

    private static final String JSON_CONTENT_TYPE = "application/json";
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private final int status;
    private final String content;
    private final String contentType;

    /**
     * @param status      HTTP status code of the response.
     * @param content     Raw response body.
     * @param contentType Value of the response's Content-Type header.
     */
    public HttpResponse(int status, String content, String contentType) {
        this.status = status;
        this.content = content;
        this.contentType = contentType;
    }

    public int getStatus() {
        return status;
    }

    public String getContent() {
        return content;
    }

    /**
     * Parses the body as a JSON tree.
     *
     * @throws IllegalStateException if the response is not JSON.
     * @throws IOException           if the body cannot be parsed.
     */
    public JsonNode getContentAsJson() throws IOException {
        ensureJsonContent();
        return MAPPER.readTree(content);
    }

    /**
     * Deserializes the body into a single instance of {@code type}.
     * ObjectReader.readValue is already generic, so the previous unchecked cast
     * (and its @SuppressWarnings) was unnecessary and has been removed.
     */
    public <T> T getContentAsObject(Class<T> type) throws IOException {
        ensureJsonContent();
        return MAPPER.readerFor(type).readValue(content);
    }

    /**
     * Deserializes a JSON array body into a collection of {@code type}.
     * The body must be a JSON array; a non-array body fails the ArrayNode cast.
     */
    public <T> Collection<T> getContentAsCollection(Class<T> type) throws IOException {
        ensureJsonContent();
        ObjectReader reader = MAPPER.readerFor(type);
        List<T> results = new ArrayList<>();
        ArrayNode array = (ArrayNode) MAPPER.readTree(content);
        for (JsonNode node : array) {
            results.add(reader.readValue(node));
        }
        return results;
    }

    // Shared guard for all JSON accessors (was duplicated three times). Comparing
    // with the constant on the left also avoids an NPE when the server sent no
    // Content-Type header — the caller gets the intended IllegalStateException.
    private void ensureJsonContent() {
        if (!JSON_CONTENT_TYPE.equals(contentType)) {
            throw new IllegalStateException("Content is not JSON: " + contentType);
        }
    }
}
| 4,190 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/cluster/InstanceType.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
/**
 * Neptune instance types, each mapped to a recommended level of concurrency.
 * Replaces the per-constant anonymous bodies with a simple constructor field —
 * same constants, same values, far less boilerplate.
 */
public enum InstanceType {

    // R4 family
    db_r4_large(4),
    db_r4_xlarge(8),
    db_r4_2xlarge(16),
    db_r4_4xlarge(32),
    db_r4_8xlarge(64),
    // R5 family
    db_r5_large(4),
    db_r5_xlarge(8),
    db_r5_2xlarge(16),
    db_r5_4xlarge(32),
    db_r5_8xlarge(64),
    db_r5_12xlarge(96),
    db_r5_16xlarge(128),
    db_r5_24xlarge(192),
    // R5d family
    db_r5d_large(4),
    db_r5d_xlarge(8),
    db_r5d_2xlarge(16),
    db_r5d_4xlarge(32),
    db_r5d_8xlarge(64),
    db_r5d_12xlarge(96),
    db_r5d_16xlarge(128),
    db_r5d_24xlarge(192),
    // R6g family
    db_r6g_large(4),
    db_r6g_xlarge(8),
    db_r6g_2xlarge(16),
    db_r6g_4xlarge(32),
    db_r6g_8xlarge(64),
    db_r6g_12xlarge(96),
    db_r6g_16xlarge(128),
    // X2g family
    db_x2g_large(4),
    db_x2g_xlarge(8),
    db_x2g_2xlarge(16),
    db_x2g_4xlarge(32),
    db_x2g_8xlarge(64),
    db_x2g_12xlarge(96),
    db_x2g_16xlarge(128),
    // M5 family
    db_m5_large(4),
    db_m5_xlarge(8),
    db_m5_2xlarge(16),
    // NOTE(review): "3xlarge" is unusual (m5 normally offers 4xlarge); preserved as-is.
    db_m5_3xlarge(32),
    db_m5_8xlarge(64),
    db_m5_12xlarge(96),
    db_m5_16xlarge(128),
    db_m5_24xlarge(192),
    // T3 family
    db_t3_medium(4);

    private final int concurrency;

    InstanceType(int concurrency) {
        this.concurrency = concurrency;
    }

    int concurrency() {
        return concurrency;
    }

    /**
     * Parses an instance type such as "db.r5.xlarge" or "r5.xlarge" (the "db."
     * prefix is added when absent). Unknown types fall back to db_r5_2xlarge.
     */
    public static InstanceType parse(String value) {
        String typeName = value.startsWith("db.") ? value : "db." + value;
        typeName = typeName.toLowerCase().replace(".", "_");
        try {
            return InstanceType.valueOf(typeName);
        } catch (IllegalArgumentException e) {
            // Unrecognized instance types use a mid-sized default.
            return db_r5_2xlarge;
        }
    }

    /** The dotted AWS form of the name, e.g. "db.r5.2xlarge". */
    public String value() {
        return name().replace("_", ".");
    }

    @Override
    public String toString() {
        return value();
    }
}
| 4,191 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/GitProperties.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Build metadata (commit id, version, timestamps) read from the generated
 * {@code git.properties} classpath resource. Missing resource or properties
 * default to "unknown".
 */
public class GitProperties {

    private final String commitId;
    private final String buildVersion;
    private final String commitTime;
    private final String buildTime;

    public GitProperties(String commitId, String buildVersion, String commitTime, String buildTime) {
        this.commitId = commitId;
        this.buildVersion = buildVersion;
        this.commitTime = commitTime;
        this.buildTime = buildTime;
    }

    public String commitId() {
        return commitId;
    }

    /**
     * Loads metadata from the {@code git.properties} classpath resource.
     * Never fails: if the resource is absent or unreadable, all fields default
     * to "unknown".
     */
    public static GitProperties fromResource() {
        Properties properties = new Properties();
        // try-with-resources replaces the manual close(), which leaked the stream
        // whenever properties.load() threw.
        try (InputStream stream = ClassLoader.getSystemResourceAsStream("git.properties")) {
            if (stream != null) {
                properties.load(stream);
            }
        } catch (IOException ignored) {
            // Build metadata is informational only; fall through to defaults.
        }
        return new GitProperties(
                properties.getProperty("git.commit.id", "unknown"),
                properties.getProperty("git.build.version", "unknown"),
                properties.getProperty("git.commit.time", "unknown"),
                properties.getProperty("git.build.time", "unknown"));
    }

    @Override
    public String toString() {
        return "[" +
                "buildVersion='" + buildVersion + '\'' +
                ", buildTime='" + buildTime + '\'' +
                ", commitId='" + commitId + '\'' +
                ", commitTime='" + commitTime + '\'' +
                ']';
    }
}
| 4,192 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/Activity.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
/**
 * Marker for units of work whose duration can be measured (see Timer).
 * Unlike java.lang.Runnable / java.util.concurrent.Callable, these variants
 * declare no checked exceptions; CheckedActivity provides throwing counterparts.
 */
public interface Activity {
    /** A timed activity that produces no result and throws no checked exceptions. */
    interface Runnable extends Activity {
        void run();
    }
    /** A timed activity that produces a result and throws no checked exceptions. */
    interface Callable<T> extends Activity {
        T call();
    }
}
| 4,193 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/S3ObjectInfo.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
import com.amazonaws.services.s3.Headers;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.SSEAlgorithm;
import org.apache.commons.lang.StringUtils;
import java.io.File;
import java.net.URI;
/**
 * Parses an "s3://bucket/key" URI into its bucket, key, and file-name parts,
 * and offers helpers for deriving related keys and upload metadata.
 */
public class S3ObjectInfo {

    private final String bucket;
    private final String key;
    private final String fileName;

    /**
     * @param s3Uri URI of the form "s3://bucket/path/to/object". An empty path
     *              yields an empty key.
     */
    public S3ObjectInfo(String s3Uri) {
        URI uri = URI.create(s3Uri);
        bucket = uri.getAuthority();
        String path = uri.getPath();
        // Drop the leading "/" from the URI path to form the S3 key.
        key = StringUtils.isNotEmpty(path) ? path.substring(1) : "";
        fileName = new File(uri.getPath()).getName();
    }

    public String bucket() {
        return bucket;
    }

    public String key() {
        return key;
    }

    /**
     * Populates upload metadata with the content length and server-side encryption:
     * SSE-KMS with the given key id when supplied, otherwise SSE-S3 (AES-256).
     */
    public static ObjectMetadata createObjectMetadata(long contentLength, String sseKmsKeyId, ObjectMetadata objectMetadata){
        objectMetadata.setContentLength(contentLength);
        if (!StringUtils.isBlank(sseKmsKeyId)) {
            objectMetadata.setSSEAlgorithm(SSEAlgorithm.KMS.getAlgorithm());
            objectMetadata.setHeader(
                    Headers.SERVER_SIDE_ENCRYPTION_AWS_KMS_KEYID,
                    sseKmsKeyId
            );
        } else {
            objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
        }
        return objectMetadata;
    }

    /** Convenience overload that starts from fresh metadata. */
    public static ObjectMetadata createObjectMetadata(long contentLength, String sseKmsKeyId) {
        return createObjectMetadata(contentLength, sseKmsKeyId, new ObjectMetadata());
    }

    /** Local file (under {@code parent}) to download this object to. */
    public File createDownloadFile(String parent) {
        return new File(parent, fileName);
    }

    // NOTE(review): the File-based key manipulation below uses the platform path
    // separator; on Windows the resulting keys would contain backslashes. Presumably
    // this tool only runs on POSIX hosts — confirm before reusing elsewhere.

    /** Returns a new S3ObjectInfo with {@code suffix} appended to this object's key. */
    public S3ObjectInfo withNewKeySuffix(String suffix) {
        File file = StringUtils.isNotEmpty(key) ? new File(key, suffix) : new File(suffix);
        return new S3ObjectInfo( String.format("s3://%s/%s", bucket, file.getPath()));
    }

    /**
     * If the key contains {@code placeholder}, replaces it with {@code ifPresent};
     * otherwise appends {@code ifAbsent} to the key.
     */
    public S3ObjectInfo replaceOrAppendKey(String placeholder, String ifPresent, String ifAbsent) {
        File file = key.contains(placeholder) ?
                new File(key.replace(placeholder, ifPresent)) :
                new File(key, ifAbsent);
        return new S3ObjectInfo( String.format("s3://%s/%s", bucket, file.getPath()));
    }

    /** Overload where the replacement and the appended value are the same. */
    public S3ObjectInfo replaceOrAppendKey(String placeholder, String ifPresent) {
        return replaceOrAppendKey(placeholder, ifPresent, ifPresent);
    }

    @Override
    public String toString() {
        return String.format("s3://%s/%s", bucket, key);
    }
}
| 4,194 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/NotImplementedException.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
/**
 * Thrown to signal a code path that has intentionally not been implemented.
 * Unchecked so callers are not forced to handle what is a programming error.
 */
public class NotImplementedException extends RuntimeException {
}
| 4,195 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/Timer.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
/**
 * Measures and reports (to stderr) how long an activity takes, for both
 * result-producing and void activities, with checked and unchecked variants.
 * Failures are reported and the original exception is rethrown.
 */
public class Timer {

    public static void timedActivity(String description, Activity.Runnable activity) {
        timedActivity(description, true, activity);
    }

    public static void timedActivity(String description, boolean padWithNewlines, Activity.Runnable activity) {
        long startedAt = System.currentTimeMillis();
        try {
            activity.run();
            reportSuccess(description, padWithNewlines, startedAt);
        } catch (Exception e) {
            reportFailure(description, padWithNewlines, startedAt);
            throw e;
        }
    }

    public static void timedActivity(String description, CheckedActivity.Runnable activity) throws Exception {
        timedActivity(description, true, activity);
    }

    public static void timedActivity(String description, boolean padWithNewlines, CheckedActivity.Runnable activity) throws Exception {
        long startedAt = System.currentTimeMillis();
        try {
            activity.run();
            reportSuccess(description, padWithNewlines, startedAt);
        } catch (Exception e) {
            reportFailure(description, padWithNewlines, startedAt);
            throw e;
        }
    }

    public static <T> T timedActivity(String description, Activity.Callable<T> activity) {
        return timedActivity(description, true, activity);
    }

    public static <T> T timedActivity(String description, boolean padWithNewlines, Activity.Callable<T> activity) {
        long startedAt = System.currentTimeMillis();
        try {
            T result = activity.call();
            reportSuccess(description, padWithNewlines, startedAt);
            return result;
        } catch (Exception e) {
            reportFailure(description, padWithNewlines, startedAt);
            throw e;
        }
    }

    public static <T> T timedActivity(String description, CheckedActivity.Callable<T> activity) throws Exception {
        return timedActivity(description, true, activity);
    }

    public static <T> T timedActivity(String description, boolean padWithNewlines, CheckedActivity.Callable<T> activity) throws Exception {
        long startedAt = System.currentTimeMillis();
        try {
            T result = activity.call();
            reportSuccess(description, padWithNewlines, startedAt);
            return result;
        } catch (Exception e) {
            reportFailure(description, padWithNewlines, startedAt);
            throw e;
        }
    }

    // Reporting is factored into shared helpers so all overloads print identically.

    private static void reportSuccess(String description, boolean padWithNewlines, long startedAt) {
        emit(String.format("Completed %s in %s seconds", description, elapsedSeconds(startedAt)), padWithNewlines);
    }

    private static void reportFailure(String description, boolean padWithNewlines, long startedAt) {
        emit(String.format("An error occurred while %s. Elapsed time: %s seconds", description, elapsedSeconds(startedAt)), padWithNewlines);
    }

    // Whole seconds elapsed since startedAt (truncated, as before).
    private static long elapsedSeconds(long startedAt) {
        return (System.currentTimeMillis() - startedAt) / 1000;
    }

    // Writes the message to stderr, optionally padded with blank lines.
    private static void emit(String message, boolean padWithNewlines) {
        if (padWithNewlines) {
            System.err.println();
        }
        System.err.println(message);
        if (padWithNewlines) {
            System.err.println();
        }
    }
}
| 4,196 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/CheckedActivity.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
/**
 * Marker for timed units of work that may throw checked exceptions
 * (see Timer). Activity provides the non-throwing counterparts.
 */
public interface CheckedActivity {
    /** A timed activity that produces no result and may throw. */
    interface Runnable extends CheckedActivity {
        void run() throws Exception;
    }
    /** A timed activity that produces a result and may throw. */
    interface Callable<T> extends CheckedActivity{
        T call() throws Exception;
    }
}
| 4,197 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/AWSCredentialsUtil.java
|
package com.amazonaws.services.neptune.util;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.regions.DefaultAwsRegionProviderChain;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClient;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Factory helpers for AWS credentials providers: named-profile credentials and
 * STS assume-role credentials.
 */
public class AWSCredentialsUtil {

    private static final Logger logger = LoggerFactory.getLogger(AWSCredentialsUtil.class);

    /**
     * Returns a provider for the given named profile, optionally read from a
     * specific credentials file. Falls back to the default provider chain when
     * neither a profile name nor a file path is supplied.
     */
    public static AWSCredentialsProvider getProfileCredentialsProvider(String profileName, String profilePath) {
        if (StringUtils.isEmpty(profileName) && StringUtils.isEmpty(profilePath)) {
            return new DefaultAWSCredentialsProviderChain();
        }
        if (StringUtils.isEmpty(profilePath)) {
            logger.debug("Using ProfileCredentialsProvider with profile: {}", profileName);
            return new ProfileCredentialsProvider(profileName);
        }
        // Fixed: the original format string had two arguments but only one %s
        // placeholder, so the credentials file path was never logged. SLF4J
        // parameterized logging also skips formatting when debug is disabled.
        logger.debug("Using ProfileCredentialsProvider with profile: {} and credentials file: {}", profileName, profilePath);
        return new ProfileCredentialsProvider(profilePath, profileName);
    }

    /** Assume-role provider sourcing base credentials from the default chain. */
    public static AWSCredentialsProvider getSTSAssumeRoleCredentialsProvider(String roleARN, String sessionName, String externalId) {
        return getSTSAssumeRoleCredentialsProvider(roleARN, sessionName, externalId, new DefaultAWSCredentialsProviderChain());
    }

    /** Assume-role provider using the region from the default region chain. */
    public static AWSCredentialsProvider getSTSAssumeRoleCredentialsProvider(String roleARN,
                                                                             String sessionName,
                                                                             String externalId,
                                                                             AWSCredentialsProvider sourceCredentialsProvider) {
        return getSTSAssumeRoleCredentialsProvider(roleARN, sessionName, externalId, sourceCredentialsProvider,
                new DefaultAwsRegionProviderChain().getRegion());
    }

    /**
     * @param roleARN                   ARN of the role to assume.
     * @param sessionName               STS session name.
     * @param externalId                Optional external id; {@code null} to omit.
     * @param sourceCredentialsProvider Credentials used to call STS.
     * @param region                    Region for the STS client.
     */
    public static AWSCredentialsProvider getSTSAssumeRoleCredentialsProvider(String roleARN,
                                                                             String sessionName,
                                                                             String externalId,
                                                                             AWSCredentialsProvider sourceCredentialsProvider,
                                                                             String region) {
        STSAssumeRoleSessionCredentialsProvider.Builder providerBuilder = new STSAssumeRoleSessionCredentialsProvider.Builder(roleARN, sessionName)
                .withStsClient(
                        AWSSecurityTokenServiceClient.builder().withCredentials(sourceCredentialsProvider).withRegion(region).build());
        if (externalId != null) {
            providerBuilder = providerBuilder.withExternalId(externalId);
        }
        logger.debug("Assuming Role: {} with session name: {}", roleARN, sessionName);
        return providerBuilder.build();
    }
}
| 4,198 |
0 |
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/util/EnvironmentVariableUtils.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
/**
 * Static helpers for reading environment variables, treating empty values the
 * same as unset ones.
 */
public class EnvironmentVariableUtils {

    // Utility class: not meant to be instantiated.
    private EnvironmentVariableUtils() {
    }

    /**
     * Returns the value of the named environment variable.
     *
     * @throws IllegalStateException if the variable is unset or empty.
     */
    public static String getMandatoryEnv(String name) {
        // Read once instead of twice (the environment is fixed for the JVM's
        // lifetime, but a single lookup is still clearer and cheaper).
        String value = System.getenv(name);
        if (isNullOrEmpty(value)) {
            throw new IllegalStateException(String.format("Missing environment variable: %s", name));
        }
        return value;
    }

    /** Returns the named variable's value, or {@code defaultValue} if unset or empty. */
    public static String getOptionalEnv(String name, String defaultValue) {
        String value = System.getenv(name);
        return isNullOrEmpty(value) ? defaultValue : value;
    }

    private static boolean isNullOrEmpty(String value) {
        return value == null || value.isEmpty();
    }
}
| 4,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.