Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheNodeList.java
package com.netflix.evcache.pool;
import java.io.IOException;
import java.net.UnknownServiceException;
import java.util.Map;
public interface EVCacheNodeList {
/**
 * Discover memcached instances suitable for our use from the Discovery
 * Service.
 *
 * @param appName The EVCache app for which we need instances
 * @throws UnknownServiceException
 *             if no suitable instances can be found
 * @throws IllegalStateException
 *             if an error occurred in the Discovery service
 *
 * TODO: Add a fallback source for the list, e.g. from PersistedProperties.
 */
public abstract Map<ServerGroup, EVCacheServerGroupConfig> discoverInstances(String appName) throws IOException;
}
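
A minimal sketch of an implementation, for illustration only: the class name is hypothetical, and a real implementation would query the Discovery Service (or the PersistedProperties fallback the TODO mentions); this one simply reports an empty topology.

package com.netflix.evcache.pool;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
public class StaticEVCacheNodeList implements EVCacheNodeList {
    @Override
    public Map<ServerGroup, EVCacheServerGroupConfig> discoverInstances(String appName) throws IOException {
        // No discovery source is wired in; callers see an empty topology.
        return Collections.emptyMap();
    }
}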
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheNodeLocator.java
package com.netflix.evcache.pool;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import com.netflix.archaius.api.Property;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.evcache.util.EVCacheConfig;
import net.spy.memcached.DefaultHashAlgorithm;
import net.spy.memcached.EVCacheMemcachedNodeROImpl;
import net.spy.memcached.HashAlgorithm;
import net.spy.memcached.MemcachedNode;
import net.spy.memcached.NodeLocator;
import net.spy.memcached.util.KetamaNodeLocatorConfiguration;
public class EVCacheNodeLocator implements NodeLocator {
private static final Logger log = LoggerFactory.getLogger(EVCacheNodeLocator.class);
private TreeMap<Long, MemcachedNode> ketamaNodes;
protected final EVCacheClient client;
private final Property<Boolean> partialStringHash;
private final Property<String> hashDelimiter;
private final Collection<MemcachedNode> allNodes;
private final HashAlgorithm hashingAlgorithm;
private final KetamaNodeLocatorConfiguration config;
/**
 * Create a new KetamaNodeLocator using the specified nodes and the specified
 * hash algorithm and configuration.
 *
 * @param nodes
 *            The List of nodes to use in the Ketama consistent hash
 *            continuum
 * @param alg
 *            The hash algorithm to use when choosing a node in the Ketama
 *            consistent hash continuum
 * @param conf
 *            The locator configuration, e.g. node repetitions and the key format per node
 */
public EVCacheNodeLocator(EVCacheClient client, List<MemcachedNode> nodes, HashAlgorithm alg, KetamaNodeLocatorConfiguration conf) {
super();
this.allNodes = nodes;
this.hashingAlgorithm = alg;
this.config = conf;
this.client = client;
this.partialStringHash = EVCacheConfig.getInstance().getPropertyRepository().get(client.getAppName() + "." + client.getServerGroupName() + ".hash.on.partial.key", Boolean.class)
.orElseGet(client.getAppName()+ ".hash.on.partial.key").orElse(false);
this.hashDelimiter = EVCacheConfig.getInstance().getPropertyRepository().get(client.getAppName() + "." + client.getServerGroupName() + ".hash.delimiter", String.class)
.orElseGet(client.getAppName() + ".hash.delimiter").orElse(":");
setKetamaNodes(nodes);
}
private EVCacheNodeLocator(EVCacheClient client, TreeMap<Long, MemcachedNode> smn, Collection<MemcachedNode> an, HashAlgorithm alg, KetamaNodeLocatorConfiguration conf) {
super();
this.ketamaNodes = smn;
this.allNodes = an;
this.hashingAlgorithm = alg;
this.config = conf;
this.client = client;
this.partialStringHash = EVCacheConfig.getInstance().getPropertyRepository().get(client.getAppName() + "." + client.getServerGroupName() + ".hash.on.partial.key", Boolean.class)
.orElseGet(client.getAppName()+ ".hash.on.partial.key").orElse(false);
this.hashDelimiter = EVCacheConfig.getInstance().getPropertyRepository().get(client.getAppName() + "." + client.getServerGroupName() + ".hash.delimiter", String.class)
.orElseGet(client.getAppName() + ".hash.delimiter").orElse(":");
}
/*
* @see net.spy.memcached.NodeLocator#getAll
*/
public Collection<MemcachedNode> getAll() {
return allNodes;
}
/*
* @see net.spy.memcached.NodeLocator#getPrimary
*/
public MemcachedNode getPrimary(String k) {
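// When partial-key hashing is enabled, only the portion of the key before the
// delimiter is hashed, so related keys co-locate on the same node.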
if (partialStringHash.get()) {
final int index = k.indexOf(hashDelimiter.get());
if (index > 0) {
k = k.substring(0, index);
}
}
final long hash = hashingAlgorithm.hash(k);
Map.Entry<Long, MemcachedNode> entry = ketamaNodes.ceilingEntry(hash);
if (entry == null) {
entry = ketamaNodes.firstEntry();
}
return entry.getValue();
}
/*
* @return Returns the max key in the hashing distribution
*/
public long getMaxKey() {
return getKetamaNodes().lastKey().longValue();
}
public MemcachedNode getNodeForKey(long _hash) {
long start = (log.isDebugEnabled()) ? System.nanoTime() : 0;
try {
Long hash = Long.valueOf(_hash);
hash = ketamaNodes.ceilingKey(hash);
if (hash == null) {
hash = ketamaNodes.firstKey();
}
return ketamaNodes.get(hash);
} finally {
if (log.isDebugEnabled()) {
final long end = System.nanoTime();
log.debug("getNodeForKey : \t" + (end - start) / 1000);
}
}
}
public Iterator<MemcachedNode> getSequence(String k) {
final List<MemcachedNode> allKetamaNodes = new ArrayList<MemcachedNode>(getKetamaNodes().values());
Collections.shuffle(allKetamaNodes);
return allKetamaNodes.iterator();
}
public NodeLocator getReadonlyCopy() {
final TreeMap<Long, MemcachedNode> ketamaNodesCopy = new TreeMap<Long, MemcachedNode>(getKetamaNodes());
final Collection<MemcachedNode> aNodes = new ArrayList<MemcachedNode>(allNodes.size());
// Rewrite the values of the copied map with read-only wrappers.
for (Map.Entry<Long, MemcachedNode> me : ketamaNodesCopy.entrySet()) {
me.setValue(new EVCacheMemcachedNodeROImpl(me.getValue()));
}
// Copy the allNodes collection, also wrapped read-only.
for (MemcachedNode n : allNodes) {
aNodes.add(new EVCacheMemcachedNodeROImpl(n));
}
return new EVCacheNodeLocator(client, ketamaNodesCopy, aNodes, hashingAlgorithm, config);
}
/**
* @return the ketamaNodes
*/
protected TreeMap<Long, MemcachedNode> getKetamaNodes() {
return ketamaNodes;
}
/**
 * @return the readonly view of ketamaNodes. This is mainly for admin
 *         purposes
 */
public Map<Long, MemcachedNode> getKetamaNodeMap() {
return Collections.<Long, MemcachedNode> unmodifiableMap(ketamaNodes);
}
/**
* Setup the KetamaNodeLocator with the list of nodes it should use.
*
* @param nodes
* a List of MemcachedNodes for this KetamaNodeLocator to use in
* its continuum
*/
protected final void setKetamaNodes(List<MemcachedNode> nodes) {
TreeMap<Long, MemcachedNode> newNodeMap = new TreeMap<Long, MemcachedNode>();
final int numReps = config.getNodeRepetitions();
for (MemcachedNode node : nodes) {
// Ketama reuses each 16-byte MD5 digest as four 4-byte continuum points.
if (hashingAlgorithm == DefaultHashAlgorithm.KETAMA_HASH) {
for (int i = 0; i < numReps / 4; i++) {
final String hashString = config.getKeyForNode(node, i);
byte[] digest = DefaultHashAlgorithm.computeMd5(hashString);
if (log.isDebugEnabled()) log.debug("digest : " + digest);
for (int h = 0; h < 4; h++) {
long k = ((long) (digest[3 + h * 4] & 0xFF) << 24)
| ((long) (digest[2 + h * 4] & 0xFF) << 16)
| ((long) (digest[1 + h * 4] & 0xFF) << 8)
| (digest[h * 4] & 0xFF);
newNodeMap.put(Long.valueOf(k), node);
if (log.isDebugEnabled()) log.debug("Key : " + hashString + " ; hash : " + k + "; node " + node );
}
}
} else {
for (int i = 0; i < numReps; i++) {
final Long hashL = Long.valueOf(hashingAlgorithm.hash(config.getKeyForNode(node, i)));
newNodeMap.put(hashL, node);
}
}
}
if (log.isDebugEnabled()) log.debug("NewNodeMapSize : " + newNodeMap.size() + "; MapSize : " + (numReps * nodes.size()));
if (log.isTraceEnabled()) {
for(Long key : newNodeMap.keySet()) {
log.trace("Hash : " + key + "; Node : " + newNodeMap.get(key));
}
}
ketamaNodes = newNodeMap;
}
@Override
public void updateLocator(List<MemcachedNode> nodes) {
setKetamaNodes(nodes);
}
@Override
public String toString() {
return "EVCacheNodeLocator [ketamaNodes=" + ketamaNodes + ", EVCacheClient=" + client + ", partialStringHash=" + partialStringHash
+ ", hashDelimiter=" + hashDelimiter + ", allNodes=" + allNodes + ", hashingAlgorithm=" + hashingAlgorithm + ", config=" + config + "]";
}
}
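
The KETAMA_HASH branch in setKetamaNodes derives four 32-bit continuum points from each 16-byte MD5 digest, and getNodeForKey resolves a hash via a ceiling lookup that wraps to the first entry. Below is a self-contained sketch of both steps; the class name and per-node key format are illustrative choices, not part of EVCache.

package com.netflix.evcache.pool;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class KetamaRingSketch {
    private final TreeMap<Long, String> ring = new TreeMap<Long, String>();
    public KetamaRingSketch(List<String> nodes, int repetitions) throws NoSuchAlgorithmException {
        final MessageDigest md5 = MessageDigest.getInstance("MD5");
        for (String node : nodes) {
            // Four points per digest, so repetitions / 4 digests per node.
            for (int i = 0; i < repetitions / 4; i++) {
                final byte[] digest = md5.digest((node + "-" + i).getBytes(StandardCharsets.UTF_8));
                for (int h = 0; h < 4; h++) {
                    final long k = ((long) (digest[3 + h * 4] & 0xFF) << 24)
                            | ((long) (digest[2 + h * 4] & 0xFF) << 16)
                            | ((long) (digest[1 + h * 4] & 0xFF) << 8)
                            | (digest[h * 4] & 0xFF);
                    ring.put(Long.valueOf(k), node);
                }
            }
        }
    }
    public String nodeFor(long hash) {
        // Ceiling lookup with wraparound, mirroring getNodeForKey above.
        final Map.Entry<Long, String> e = ring.ceilingEntry(Long.valueOf(hash));
        return (e == null) ? ring.firstEntry().getValue() : e.getValue();
    }
}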
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheClientUtil.java
package com.netflix.evcache.pool;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import com.netflix.evcache.EVCacheKey;
import net.spy.memcached.transcoders.Transcoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.evcache.EVCacheLatch;
import com.netflix.evcache.EVCacheLatch.Policy;
import com.netflix.evcache.operation.EVCacheLatchImpl;
import net.spy.memcached.CachedData;
public class EVCacheClientUtil {
private static final Logger log = LoggerFactory.getLogger(EVCacheClientUtil.class);
private final ChunkTranscoder ct = new ChunkTranscoder();
private final String _appName;
private final long _operationTimeout;
public EVCacheClientUtil(String appName, long operationTimeout) {
this._appName = appName;
this._operationTimeout = operationTimeout;
}
// This method is hashing agnostic.
/**
 * TODO: once metaget is available, get the remaining TTL from an existing entry and use it.
 */
public EVCacheLatch add(EVCacheKey evcKey, final CachedData cd, Transcoder evcacheValueTranscoder, int timeToLive, Policy policy, final EVCacheClient[] clients, int latchCount, boolean fixMissing) throws Exception {
if (cd == null) return null;
final EVCacheLatchImpl latch = new EVCacheLatchImpl(policy, latchCount, _appName);
CachedData cdHashed = null;
Boolean firstStatus = null;
for (EVCacheClient client : clients) {
CachedData cd1;
if (evcKey.getHashKey(client.isDuetClient(), client.getHashingAlgorithm(), client.shouldEncodeHashKey(), client.getMaxDigestBytes(), client.getMaxHashLength(), client.getBaseEncoder()) != null) {
if(cdHashed == null) {
final EVCacheValue val = new EVCacheValue(evcKey.getCanonicalKey(client.isDuetClient()), cd.getData(), cd.getFlags(), timeToLive, System.currentTimeMillis());
cdHashed = evcacheValueTranscoder.encode(val);
}
cd1 = cdHashed;
} else {
cd1 = cd;
}
String key = evcKey.getDerivedKey(client.isDuetClient(), client.getHashingAlgorithm(), client.shouldEncodeHashKey(), client.getMaxDigestBytes(), client.getMaxHashLength(), client.getBaseEncoder());
final Future<Boolean> f = client.add(key, timeToLive, cd1, latch);
if (log.isDebugEnabled()) log.debug("ADD : Op Submitted : APP " + _appName + ", key " + key + "; future : " + f + "; client : " + client);
if(fixMissing) {
boolean status = f.get().booleanValue();
if(!status) { // most common case
if(firstStatus == null) {
for(int i = 0; i < clients.length; i++) {
latch.countDown();
}
return latch;
} else {
return fixup(client, clients, evcKey, timeToLive, policy);
}
}
if(firstStatus == null) firstStatus = Boolean.valueOf(status);
}
}
return latch;
}
private EVCacheLatch fixup(EVCacheClient sourceClient, EVCacheClient[] destClients, EVCacheKey evcKey, int timeToLive, Policy policy) {
final EVCacheLatchImpl latch = new EVCacheLatchImpl(policy, destClients.length, _appName);
try {
final CachedData readData = sourceClient.get(evcKey.getDerivedKey(sourceClient.isDuetClient(), sourceClient.getHashingAlgorithm(), sourceClient.shouldEncodeHashKey(), sourceClient.getMaxDigestBytes(), sourceClient.getMaxHashLength(), sourceClient.getBaseEncoder()), ct, false, false);
if(readData != null) {
sourceClient.touch(evcKey.getDerivedKey(sourceClient.isDuetClient(), sourceClient.getHashingAlgorithm(), sourceClient.shouldEncodeHashKey(), sourceClient.getMaxDigestBytes(), sourceClient.getMaxHashLength(), sourceClient.getBaseEncoder()), timeToLive);
for(EVCacheClient destClient : destClients) {
destClient.set(evcKey.getDerivedKey(destClient.isDuetClient(), destClient.getHashingAlgorithm(), destClient.shouldEncodeHashKey(), destClient.getMaxDigestBytes(), destClient.getMaxHashLength(), destClient.getBaseEncoder()), readData, timeToLive, latch);
}
}
latch.await(_operationTimeout, TimeUnit.MILLISECONDS);
} catch (Exception e) {
log.error("Error reading the data", e);
}
return latch;
}
}
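
The add flow above writes to each replica in turn and, when the first replica accepts the add but a later one already holds the value, calls fixup to copy the existing entry across replicas. A schematic sketch of that repair pattern using plain maps instead of the EVCache client types (all names here are illustrative):

package com.netflix.evcache.pool;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class AddFixupSketch {
    /** Stand-in for one replica; add has putIfAbsent semantics like memcached add. */
    static final class Replica {
        final Map<String, String> store = new ConcurrentHashMap<String, String>();
        boolean add(String k, String v) { return store.putIfAbsent(k, v) == null; }
        String get(String k) { return store.get(k); }
        void set(String k, String v) { store.put(k, v); }
    }
    static void addWithFixup(List<Replica> replicas, String key, String value) {
        Boolean firstStatus = null;
        for (Replica replica : replicas) {
            final boolean added = replica.add(key, value);
            if (!added) {
                // First replica already had the value: assume all do (common case).
                if (firstStatus == null) return;
                // Mixed results: this replica holds existing data; propagate it everywhere.
                final String existing = replica.get(key);
                for (Replica dest : replicas) dest.set(key, existing);
                return;
            }
            if (firstStatus == null) firstStatus = Boolean.valueOf(added);
        }
    }
}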
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheClient.java
package com.netflix.evcache.pool;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.util.zip.CRC32;
import java.util.zip.Checksum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.archaius.api.Property;
import com.netflix.evcache.EVCache;
import com.netflix.evcache.EVCache.Call;
import com.netflix.evcache.EVCacheConnectException;
import com.netflix.evcache.EVCacheException;
import com.netflix.evcache.EVCacheLatch;
import com.netflix.evcache.EVCacheReadQueueException;
import com.netflix.evcache.EVCacheSerializingTranscoder;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import com.netflix.evcache.operation.EVCacheFutures;
import com.netflix.evcache.operation.EVCacheItem;
import com.netflix.evcache.operation.EVCacheItemMetaData;
import com.netflix.evcache.operation.EVCacheLatchImpl;
import com.netflix.evcache.pool.observer.EVCacheConnectionObserver;
import com.netflix.evcache.util.EVCacheConfig;
import com.netflix.evcache.util.KeyHasher;
import com.netflix.evcache.util.KeyHasher.HashingAlgorithm;
import com.netflix.spectator.api.BasicTag;
import com.netflix.spectator.api.Counter;
import com.netflix.spectator.api.Tag;
import net.spy.memcached.CASValue;
import net.spy.memcached.CachedData;
import net.spy.memcached.ConnectionFactory;
import net.spy.memcached.EVCacheMemcachedClient;
import net.spy.memcached.EVCacheNode;
import net.spy.memcached.MemcachedClient;
import net.spy.memcached.MemcachedNode;
import net.spy.memcached.NodeLocator;
import net.spy.memcached.internal.ListenableFuture;
import net.spy.memcached.internal.OperationCompletionListener;
import net.spy.memcached.internal.OperationFuture;
import net.spy.memcached.transcoders.Transcoder;
import rx.Scheduler;
import rx.Single;
@SuppressWarnings({"rawtypes", "unchecked"})
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings({ "REC_CATCH_EXCEPTION",
"RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE" })
public class EVCacheClient {
private static final Logger log = LoggerFactory.getLogger(EVCacheClient.class);
private final ConnectionFactory connectionFactory;
private final EVCacheMemcachedClient evcacheMemcachedClient;
private final List<InetSocketAddress> memcachedNodesInZone;
private EVCacheConnectionObserver connectionObserver = null;
private boolean shutdown = false;
private final int id;
private final String appName;
private final String zone;
private final ServerGroup serverGroup;
private final EVCacheServerGroupConfig config;
private final int maxWriteQueueSize;
private final Property<Integer> readTimeout;
private final Property<Integer> bulkReadTimeout;
private final Property<Integer> maxReadQueueSize;
private final Property<Boolean> ignoreInactiveNodes;
private final Property<Boolean> enableChunking;
private final Property<Boolean> hashKeyByServerGroup;
private final Property<Boolean> shouldEncodeHashKey;
private final Property<Integer> maxDigestBytes;
private final Property<Integer> maxHashLength;
private final Property<Integer> chunkSize, writeBlock;
private final Property<String> encoderBase;
private final ChunkTranscoder chunkingTranscoder;
private final EVCacheSerializingTranscoder decodingTranscoder;
private static final int SPECIAL_BYTEARRAY = (8 << 8);
private final EVCacheClientPool pool;
// private Counter addCounter = null;
private final Property<Boolean> ignoreTouch;
private List<Tag> tags;
private final Map<String, Counter> counterMap = new ConcurrentHashMap<String, Counter>();
private final Property<String> hashingAlgo;
protected final Counter operationsCounter;
private final boolean isDuetClient;
EVCacheClient(String appName, String zone, int id, EVCacheServerGroupConfig config,
List<InetSocketAddress> memcachedNodesInZone, int maxQueueSize, Property<Integer> maxReadQueueSize,
Property<Integer> readTimeout, Property<Integer> bulkReadTimeout,
Property<Integer> opQueueMaxBlockTime,
Property<Integer> operationTimeout, EVCacheClientPool pool, boolean isDuetClient) throws IOException {
this.memcachedNodesInZone = memcachedNodesInZone;
this.id = id;
this.appName = appName;
this.zone = zone;
this.config = config;
this.serverGroup = config.getServerGroup();
this.readTimeout = readTimeout;
this.bulkReadTimeout = bulkReadTimeout;
this.maxReadQueueSize = maxReadQueueSize;
// this.operationTimeout = operationTimeout;
this.pool = pool;
this.isDuetClient = isDuetClient;
final List<Tag> tagList = new ArrayList<Tag>(4);
EVCacheMetricsFactory.getInstance().addAppNameTags(tagList, appName);
tagList.add(new BasicTag(EVCacheMetricsFactory.CONNECTION_ID, String.valueOf(id)));
tagList.add(new BasicTag(EVCacheMetricsFactory.SERVERGROUP, serverGroup.getName()));
this.tags = Collections.<Tag>unmodifiableList(new ArrayList(tagList));
tagList.add(new BasicTag(EVCacheMetricsFactory.STAT_NAME, EVCacheMetricsFactory.POOL_OPERATIONS));
operationsCounter = EVCacheMetricsFactory.getInstance().getCounter(EVCacheMetricsFactory.INTERNAL_STATS, tagList);
this.enableChunking = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName()+ ".chunk.data", Boolean.class).orElseGet(appName + ".chunk.data").orElse(false);
this.chunkSize = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".chunk.size", Integer.class).orElseGet(appName + ".chunk.size").orElse(1180);
this.writeBlock = EVCacheConfig.getInstance().getPropertyRepository().get(appName + "." + this.serverGroup.getName() + ".write.block.duration", Integer.class).orElseGet(appName + ".write.block.duration").orElse(25);
this.chunkingTranscoder = new ChunkTranscoder();
this.maxWriteQueueSize = maxQueueSize;
this.ignoreTouch = EVCacheConfig.getInstance().getPropertyRepository().get(appName + "." + this.serverGroup.getName() + ".ignore.touch", Boolean.class).orElseGet(appName + ".ignore.touch").orElse(false);
this.connectionFactory = pool.getEVCacheClientPoolManager().getConnectionFactoryProvider().getConnectionFactory(this);
this.connectionObserver = new EVCacheConnectionObserver(this);
this.ignoreInactiveNodes = EVCacheConfig.getInstance().getPropertyRepository().get(appName + ".ignore.inactive.nodes", Boolean.class).orElse(true);
this.evcacheMemcachedClient = new EVCacheMemcachedClient(connectionFactory, memcachedNodesInZone, readTimeout, this);
this.evcacheMemcachedClient.addObserver(connectionObserver);
this.decodingTranscoder = new EVCacheSerializingTranscoder(Integer.MAX_VALUE);
decodingTranscoder.setCompressionThreshold(Integer.MAX_VALUE);
this.hashKeyByServerGroup = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".hash.key", Boolean.class).orElse(null);
this.hashingAlgo = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".hash.algo", String.class).orElseGet(appName + ".hash.algo").orElse("siphash24");
this.shouldEncodeHashKey = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".hash.encode", Boolean.class).orElse(null);
this.maxDigestBytes = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".max.digest.bytes", Integer.class).orElse(null);
this.maxHashLength = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".max.hash.length", Integer.class).orElse(null);
this.encoderBase = EVCacheConfig.getInstance().getPropertyRepository().get(this.serverGroup.getName() + ".hash.encoder", String.class).orElse("base64");
ping();
}
public void ping() {
try {
final Map<SocketAddress, String> versions = getVersions();
for (Entry<SocketAddress, String> vEntry : versions.entrySet()) {
if (log.isDebugEnabled()) log.debug("Host : " + vEntry.getKey() + " : " + vEntry.getValue());
}
} catch (Throwable t) {
log.error("Error while pinging the servers", t);
}
}
public boolean isDuetClient() {
return isDuetClient;
}
public Boolean shouldEncodeHashKey() {
return this.shouldEncodeHashKey.get();
}
public String getBaseEncoder() {
return this.encoderBase.get();
}
public Integer getMaxDigestBytes() {
return this.maxDigestBytes.get();
}
public Integer getMaxHashLength() {
return this.maxHashLength.get();
}
private Collection<String> validateReadQueueSize(Collection<String> canonicalKeys, EVCache.Call call) {
if (evcacheMemcachedClient.getNodeLocator() == null) return canonicalKeys;
final Collection<String> retKeys = new ArrayList<>(canonicalKeys.size());
for (String key : canonicalKeys) {
final MemcachedNode node = evcacheMemcachedClient.getNodeLocator().getPrimary(key);
if (node instanceof EVCacheNode) {
final EVCacheNode evcNode = (EVCacheNode) node;
if (!evcNode.isAvailable(call)) {
continue;
}
final int size = evcNode.getReadQueueSize();
final boolean canAddToOpQueue = size < (maxReadQueueSize.get() * 2);
// if (log.isDebugEnabled()) log.debug("Bulk Current Read Queue
// Size - " + size + " for app " + appName + " & zone " + zone +
// " ; node " + node);
if (!canAddToOpQueue) {
final String hostName;
if(evcNode.getSocketAddress() instanceof InetSocketAddress) {
hostName = ((InetSocketAddress)evcNode.getSocketAddress()).getHostName();
} else {
hostName = evcNode.getSocketAddress().toString();
}
incrementFailure(EVCacheMetricsFactory.READ_QUEUE_FULL, call, hostName);
if (log.isDebugEnabled()) log.debug("Read Queue Full on Bulk Operation for app : " + appName
+ "; zone : " + zone + "; Current Size : " + size + "; Max Size : " + maxReadQueueSize.get() * 2);
} else {
retKeys.add(key);
}
}
}
return retKeys;
}
private void incrementFailure(String metric, EVCache.Call call) {
incrementFailure(metric, call, null);
}
private void incrementFailure(String metric, EVCache.Call call, String host) {
Counter counter = counterMap.get(metric);
if(counter == null) {
final List<Tag> tagList = new ArrayList<Tag>(6);
tagList.addAll(tags);
if(call != null) {
tagList.add(new BasicTag(EVCacheMetricsFactory.CALL_TAG, call.name()));
switch(call) {
case GET:
case GETL:
case GET_AND_TOUCH:
case ASYNC_GET:
case BULK:
case GET_ALL:
tagList.add(new BasicTag(EVCacheMetricsFactory.CALL_TYPE_TAG, EVCacheMetricsFactory.READ));
break;
default :
tagList.add(new BasicTag(EVCacheMetricsFactory.CALL_TYPE_TAG, EVCacheMetricsFactory.WRITE));
break;
}
}
tagList.add(new BasicTag(EVCacheMetricsFactory.FAILURE_REASON, metric));
if(host != null) tagList.add(new BasicTag(EVCacheMetricsFactory.FAILED_HOST, host));
counter = EVCacheMetricsFactory.getInstance().getCounter(EVCacheMetricsFactory.INTERNAL_FAIL, tagList);
counterMap.put(metric, counter);
}
counter.increment();
}
public void reportWrongKeyReturned(String hostName) {
incrementFailure(EVCacheMetricsFactory.WRONG_KEY_RETURNED, null, hostName);
}
private boolean ensureWriteQueueSize(MemcachedNode node, String key, EVCache.Call call) throws EVCacheException {
if (node instanceof EVCacheNode) {
final EVCacheNode evcNode = (EVCacheNode) node;
int i = 0;
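// Poll the node's write queue, sleeping writeBlock milliseconds between checks;
// after several failed checks treat the node as wedged, shut it down, and drop the write.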
while (true) {
final int size = evcNode.getWriteQueueSize();
final boolean canAddToOpQueue = size < maxWriteQueueSize;
if (log.isDebugEnabled()) log.debug("App : " + appName + "; zone : " + zone + "; key : " + key + "; WriteQSize : " + size);
if (canAddToOpQueue) break;
try {
Thread.sleep(writeBlock.get());
} catch (InterruptedException e) {
throw new EVCacheException("Thread was Interrupted", e);
}
if(i++ > 3) {
final String hostName;
if(evcNode.getSocketAddress() instanceof InetSocketAddress) {
hostName = ((InetSocketAddress)evcNode.getSocketAddress()).getHostName();
} else {
hostName = evcNode.getSocketAddress().toString();
}
incrementFailure(EVCacheMetricsFactory.INACTIVE_NODE, call, hostName);
if (log.isDebugEnabled()) log.debug("Node : " + evcNode + " for app : " + appName + "; zone : "
+ zone + " is not active. Will Fail Fast and the write will be dropped for key : " + key);
evcNode.shutdown();
return false;
}
}
}
return true;
}
private boolean validateNode(String key, boolean _throwException, EVCache.Call call) throws EVCacheException, EVCacheConnectException {
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
// First check if the node is active
if (node instanceof EVCacheNode) {
final EVCacheNode evcNode = (EVCacheNode) node;
final String hostName;
if(evcNode.getSocketAddress() instanceof InetSocketAddress) {
hostName = ((InetSocketAddress)evcNode.getSocketAddress()).getHostName();
} else {
hostName = evcNode.getSocketAddress().toString();
}
if (!evcNode.isAvailable(call)) {
incrementFailure(EVCacheMetricsFactory.INACTIVE_NODE, call, hostName);
if (log.isDebugEnabled()) log.debug("Node : " + node + " for app : " + appName + "; zone : " + zone
+ " is not active. Will Fail Fast so that we can fallback to Other Zone if available.");
if (_throwException) throw new EVCacheConnectException("Connection for Node : " + node + " for app : " + appName
+ "; zone : " + zone + " is not active");
return false;
}
final int size = evcNode.getReadQueueSize();
final boolean canAddToOpQueue = size < maxReadQueueSize.get();
if (log.isDebugEnabled()) log.debug("Current Read Queue Size - " + size + " for app " + appName + " & zone "
+ zone + " and node : " + evcNode);
if (!canAddToOpQueue) {
incrementFailure(EVCacheMetricsFactory.READ_QUEUE_FULL, call, hostName);
if (log.isDebugEnabled()) log.debug("Read Queue Full for Node : " + node + "; app : " + appName
+ "; zone : " + zone + "; Current Size : " + size + "; Max Size : " + maxReadQueueSize.get());
if (_throwException) throw new EVCacheReadQueueException("Read Queue Full for Node : " + node + "; app : "
+ appName + "; zone : " + zone + "; Current Size : " + size + "; Max Size : " + maxReadQueueSize.get());
return false;
}
}
return true;
}
private <T> ChunkDetails<T> getChunkDetails(String key) {
final List<String> firstKeys = new ArrayList<String>(2);
firstKeys.add(key);
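// By convention a chunked entry stores its metadata under "<key>_00" while an
// unchunked entry lives under the plain key; both are fetched in one bulk call.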
final String firstKey = key + "_00";
firstKeys.add(firstKey);
try {
final Map<String, CachedData> metadataMap = evcacheMemcachedClient.asyncGetBulk(firstKeys, chunkingTranscoder, null)
.getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false);
if (metadataMap.containsKey(key)) {
return new ChunkDetails(null, null, false, metadataMap.get(key));
} else if (metadataMap.containsKey(firstKey)) {
final ChunkInfo ci = getChunkInfo(firstKey, (String) decodingTranscoder.decode(metadataMap.get(firstKey)));
if (ci == null) return null;
final List<String> keys = new ArrayList<>();
for (int i = 1; i < ci.getChunks(); i++) {
final String prefix = (i < 10) ? "0" : "";
keys.add(ci.getKey() + "_" + prefix + i);
}
return new ChunkDetails(keys, ci, true, null);
} else {
return null;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return null;
}
private <T> Single<ChunkDetails<T>> getChunkDetails(String key, Scheduler scheduler) {
final List<String> firstKeys = new ArrayList<>(2);
firstKeys.add(key);
final String firstKey = key + "_00";
firstKeys.add(firstKey);
return evcacheMemcachedClient.asyncGetBulk(firstKeys, chunkingTranscoder, null)
.getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
.map(metadataMap -> {
if (metadataMap.containsKey(key)) {
return new ChunkDetails(null, null, false, metadataMap.get(key));
} else if (metadataMap.containsKey(firstKey)) {
final ChunkInfo ci = getChunkInfo(firstKey, (String) decodingTranscoder.decode(metadataMap.get(firstKey)));
if (ci == null) return null;
final List<String> keys = new ArrayList<>();
for (int i = 1; i < ci.getChunks(); i++) {
final String prefix = (i < 10) ? "0" : "";
keys.add(ci.getKey() + "_" + prefix + i);
}
return new ChunkDetails(keys, ci, true, null);
} else {
return null;
}
});
}
private <T> T assembleChunks(String key, boolean touch, int ttl, Transcoder<T> tc, boolean hasZF) {
try {
final ChunkDetails<T> cd = getChunkDetails(key);
if (cd == null) return null;
if (!cd.isChunked()) {
if (cd.getData() == null) return null;
final Transcoder<T> transcoder = (tc == null ? (Transcoder<T>) evcacheMemcachedClient.getTranscoder()
: tc);
return transcoder.decode((CachedData) cd.getData());
} else {
final List<String> keys = cd.getChunkKeys();
final ChunkInfo ci = cd.getChunkInfo();
final Map<String, CachedData> dataMap = evcacheMemcachedClient.asyncGetBulk(keys, chunkingTranscoder, null)
.getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false);
if (dataMap.size() != ci.getChunks() - 1) {
incrementFailure(EVCacheMetricsFactory.INCORRECT_CHUNKS, null);
return null;
}
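// Reassembled length: (chunks - 2) full data chunks plus the (possibly shorter)
// last chunk; chunk 0 of the total count is the metadata entry.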
final byte[] data = new byte[(ci.getChunks() - 2) * ci.getChunkSize() + (ci.getLastChunk() == 0 ? ci
.getChunkSize() : ci.getLastChunk())];
int index = 0;
for (int i = 0; i < keys.size(); i++) {
final String _key = keys.get(i);
final CachedData _cd = dataMap.get(_key);
if (log.isDebugEnabled()) log.debug("Chunk Key " + _key + "; Value : " + _cd);
if (_cd == null) continue;
final byte[] val = _cd.getData();
// If we expect a chunk to be present and it is null then return null immediately.
if (val == null) return null;
final int len = (i == keys.size() - 1) ? ((ci.getLastChunk() == 0 || ci.getLastChunk() > ci
.getChunkSize()) ? ci.getChunkSize() : ci.getLastChunk())
: val.length;
if (len != ci.getChunkSize() && i != keys.size() - 1) {
incrementFailure(EVCacheMetricsFactory.INVALID_CHUNK_SIZE, null);
if (log.isWarnEnabled()) log.warn("CHUNK_SIZE_ERROR : Chunks : " + ci.getChunks() + " ; "
+ "length : " + len + "; expectedLength : " + ci.getChunkSize() + " for key : " + _key);
}
if (len > 0) {
try {
System.arraycopy(val, 0, data, index, len);
} catch (Exception e) {
StringBuilder sb = new StringBuilder();
sb.append("ArrayCopyError - Key : " + _key + "; final data Size : " + data.length
+ "; copy array size : " + len + "; val size : " + val.length
+ "; key index : " + i + "; copy from : " + index + "; ChunkInfo : " + ci + "\n");
for (int j = 0; j < keys.size(); j++) {
final String skey = keys.get(j);
final byte[] sval = (byte[]) dataMap.get(skey).getData();
sb.append(skey + "=" + sval.length + "\n");
}
if (log.isWarnEnabled()) log.warn(sb.toString(), e);
throw e;
}
index += val.length;
if (touch) evcacheMemcachedClient.touch(_key, ttl);
}
}
final boolean checksumPass = checkCRCChecksum(data, ci, hasZF);
if (!checksumPass) return null;
final Transcoder<T> transcoder = (tc == null ? (Transcoder<T>) evcacheMemcachedClient.getTranscoder()
: tc);
return transcoder.decode(new CachedData(ci.getFlags(), data, Integer.MAX_VALUE));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return null;
}
private <T> Single<T> assembleChunks(String key, boolean touch, int ttl, Transcoder<T> tc, boolean hasZF, Scheduler scheduler) {
return getChunkDetails(key, scheduler).flatMap(cd -> {
if (cd == null) return Single.just(null);
if (!cd.isChunked()) {
if (cd.getData() == null) return Single.just(null);
final Transcoder<T> transcoder = (tc == null ? (Transcoder<T>) evcacheMemcachedClient.getTranscoder() : tc);
return Single.just(transcoder.decode((CachedData) cd.getData()));
} else {
final List<String> keys = cd.getChunkKeys();
final ChunkInfo ci = cd.getChunkInfo();
return evcacheMemcachedClient.asyncGetBulk(keys, chunkingTranscoder, null)
.getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
.map(dataMap -> {
if (dataMap.size() != ci.getChunks() - 1) {
incrementFailure(EVCacheMetricsFactory.INCORRECT_CHUNKS, null);
return null;
}
final byte[] data = new byte[(ci.getChunks() - 2) * ci.getChunkSize() + (ci.getLastChunk() == 0 ? ci
.getChunkSize() : ci.getLastChunk())];
int index = 0;
for (int i = 0; i < keys.size(); i++) {
final String _key = keys.get(i);
final CachedData _cd = dataMap.get(_key);
if (log.isDebugEnabled()) log.debug("Chunk Key " + _key + "; Value : " + _cd);
if (_cd == null) continue;
final byte[] val = _cd.getData();
// If we expect a chunk to be present and it is null then return null immediately.
if (val == null) return null;
final int len = (i == keys.size() - 1) ? ((ci.getLastChunk() == 0 || ci.getLastChunk() > ci
.getChunkSize()) ? ci.getChunkSize() : ci.getLastChunk())
: val.length;
if (len != ci.getChunkSize() && i != keys.size() - 1) {
incrementFailure(EVCacheMetricsFactory.INVALID_CHUNK_SIZE, null);
if (log.isWarnEnabled()) log.warn("CHUNK_SIZE_ERROR : Chunks : " + ci.getChunks() + " ; "
+ "length : " + len + "; expectedLength : " + ci.getChunkSize() + " for key : " + _key);
}
if (len > 0) {
try {
System.arraycopy(val, 0, data, index, len);
} catch (Exception e) {
StringBuilder sb = new StringBuilder();
sb.append("ArrayCopyError - Key : " + _key + "; final data Size : " + data.length
+ "; copy array size : " + len + "; val size : " + val.length
+ "; key index : " + i + "; copy from : " + index + "; ChunkInfo : " + ci + "\n");
for (int j = 0; j < keys.size(); j++) {
final String skey = keys.get(j);
final byte[] sval = (byte[]) dataMap.get(skey).getData();
sb.append(skey + "=" + sval.length + "\n");
}
if (log.isWarnEnabled()) log.warn(sb.toString(), e);
throw e;
}
index += val.length;
if (touch) evcacheMemcachedClient.touch(_key, ttl);
}
}
final boolean checksumPass = checkCRCChecksum(data, ci, hasZF);
if (!checksumPass) return null;
final Transcoder<T> transcoder = (tc == null ? (Transcoder<T>) evcacheMemcachedClient.getTranscoder()
: tc);
return transcoder.decode(new CachedData(ci.getFlags(), data, Integer.MAX_VALUE));
});
}
});
}
private boolean checkCRCChecksum(byte[] data, final ChunkInfo ci, boolean hasZF) {
if (data == null || data.length == 0) return false;
final Checksum checksum = new CRC32();
checksum.update(data, 0, data.length);
final long currentChecksum = checksum.getValue();
final long expectedChecksum = ci.getChecksum();
if (log.isDebugEnabled()) log.debug("CurrentChecksum : " + currentChecksum + "; ExpectedChecksum : "
+ expectedChecksum + " for key : " + ci.getKey());
if (currentChecksum != expectedChecksum) {
if (!hasZF) {
if (log.isWarnEnabled()) log.warn("CHECKSUM_ERROR : Chunks : " + ci.getChunks() + " ; "
+ "currentChecksum : " + currentChecksum + "; expectedChecksum : " + expectedChecksum
+ " for key : " + ci.getKey());
incrementFailure(EVCacheMetricsFactory.CHECK_SUM_ERROR, null);
}
return false;
}
return true;
}
private ChunkInfo getChunkInfo(String firstKey, String metadata) {
if (metadata == null) return null;
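// Metadata layout, as written by createChunks: numChunks:chunkSize:lastChunkSize:flags:checksum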
final String[] metaItems = metadata.split(":");
if (metaItems.length != 5) return null;
final String key = firstKey.substring(0, firstKey.length() - 3);
final ChunkInfo ci = new ChunkInfo(Integer.parseInt(metaItems[0]), Integer.parseInt(metaItems[1]), Integer
.parseInt(metaItems[2]), Integer.parseInt(metaItems[3]), key, Long
.parseLong(metaItems[4]));
return ci;
}
private <T> Map<String, T> assembleChunks(Collection<String> keyList, Transcoder<T> tc, boolean hasZF) {
final List<String> firstKeys = new ArrayList<>();
for (String key : keyList) {
firstKeys.add(key);
firstKeys.add(key + "_00");
}
try {
final Map<String, CachedData> metadataMap = evcacheMemcachedClient.asyncGetBulk(firstKeys, chunkingTranscoder, null)
.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, false, false);
if (metadataMap == null) return null;
final Map<String, T> returnMap = new HashMap<>(keyList.size() * 2);
for (String key : keyList) {
if (metadataMap.containsKey(key)) {
CachedData val = metadataMap.remove(key);
returnMap.put(key, tc.decode(val));
}
}
final List<String> allKeys = new ArrayList<>();
final Map<ChunkInfo, SimpleEntry<List<String>, byte[]>> responseMap = new HashMap<>();
for (Entry<String, CachedData> entry : metadataMap.entrySet()) {
final String firstKey = entry.getKey();
final String metadata = (String) decodingTranscoder.decode(entry.getValue());
if (metadata == null) continue;
final ChunkInfo ci = getChunkInfo(firstKey, metadata);
if (ci != null) {
final List<String> ciKeys = new ArrayList<>();
for (int i = 1; i < ci.getChunks(); i++) {
final String prefix = (i < 10) ? "0" : "";
final String _key = ci.getKey() + "_" + prefix + i;
allKeys.add(_key);
ciKeys.add(_key);
}
final byte[] data = new byte[(ci.getChunks() - 2) * ci.getChunkSize() + ci.getLastChunk()];
responseMap.put(ci, new SimpleEntry<>(ciKeys, data));
}
}
final Map<String, CachedData> dataMap = evcacheMemcachedClient.asyncGetBulk(allKeys, chunkingTranscoder, null)
.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, false, false);
for (Entry<ChunkInfo, SimpleEntry<List<String>, byte[]>> entry : responseMap.entrySet()) {
final ChunkInfo ci = entry.getKey();
final SimpleEntry<List<String>, byte[]> pair = entry.getValue();
final List<String> ciKeys = pair.getKey();
byte[] data = pair.getValue();
int index = 0;
for (int i = 0; i < ciKeys.size(); i++) {
final String _key = ciKeys.get(i);
final CachedData cd = dataMap.get(_key);
if (log.isDebugEnabled()) log.debug("Chunk Key " + _key + "; Value : " + cd);
if (cd == null) continue;
final byte[] val = cd.getData();
if (val == null) {
data = null;
break;
}
final int len = (i == ciKeys.size() - 1) ? ((ci.getLastChunk() == 0 || ci.getLastChunk() > ci
.getChunkSize()) ? ci.getChunkSize() : ci.getLastChunk())
: val.length;
try {
System.arraycopy(val, 0, data, index, len);
} catch (Exception e) {
StringBuilder sb = new StringBuilder();
sb.append("ArrayCopyError - Key : " + _key + "; final data Size : " + data.length
+ "; copy array size : " + len + "; val size : " + val.length
+ "; key index : " + i + "; copy from : " + index + "; ChunkInfo : " + ci + "\n");
for (int j = 0; j < ciKeys.size(); j++) {
final String skey = ciKeys.get(j);
final byte[] sval = dataMap.get(skey).getData();
sb.append(skey + "=" + sval.length + "\n");
}
if (log.isWarnEnabled()) log.warn(sb.toString(), e);
throw e;
}
index += val.length;
}
final boolean checksumPass = checkCRCChecksum(data, ci, hasZF);
if (data != null && checksumPass) {
final CachedData cd = new CachedData(ci.getFlags(), data, Integer.MAX_VALUE);
returnMap.put(ci.getKey(), tc.decode(cd));
} else {
returnMap.put(ci.getKey(), null);
}
}
return returnMap;
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return null;
}
private <T> Single<Map<String, T>> assembleChunks(Collection<String> keyList, Transcoder<T> tc, boolean hasZF, Scheduler scheduler) {
final List<String> firstKeys = new ArrayList<>();
for (String key : keyList) {
firstKeys.add(key);
firstKeys.add(key + "_00");
}
return evcacheMemcachedClient.asyncGetBulk(firstKeys, chunkingTranscoder, null)
.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
.flatMap(metadataMap -> {
if (metadataMap == null) return null;
final Map<String, T> returnMap = new HashMap<>(keyList.size() * 2);
for (String key : keyList) {
if (metadataMap.containsKey(key)) {
CachedData val = metadataMap.remove(key);
returnMap.put(key, tc.decode(val));
}
}
final List<String> allKeys = new ArrayList<>();
final Map<ChunkInfo, SimpleEntry<List<String>, byte[]>> responseMap = new HashMap<>();
for (Entry<String, CachedData> entry : metadataMap.entrySet()) {
final String firstKey = entry.getKey();
final String metadata = (String) decodingTranscoder.decode(entry.getValue());
if (metadata == null) continue;
final ChunkInfo ci = getChunkInfo(firstKey, metadata);
if (ci != null) {
final List<String> ciKeys = new ArrayList<>();
for (int i = 1; i < ci.getChunks(); i++) {
final String prefix = (i < 10) ? "0" : "";
final String _key = ci.getKey() + "_" + prefix + i;
allKeys.add(_key);
ciKeys.add(_key);
}
final byte[] data = new byte[(ci.getChunks() - 2) * ci.getChunkSize() + ci.getLastChunk()];
responseMap.put(ci, new SimpleEntry<>(ciKeys, data));
}
}
return evcacheMemcachedClient.asyncGetBulk(allKeys, chunkingTranscoder, null)
.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
.map(dataMap -> {
for (Entry<ChunkInfo, SimpleEntry<List<String>, byte[]>> entry : responseMap.entrySet()) {
final ChunkInfo ci = entry.getKey();
final SimpleEntry<List<String>, byte[]> pair = entry.getValue();
final List<String> ciKeys = pair.getKey();
byte[] data = pair.getValue();
int index = 0;
for (int i = 0; i < ciKeys.size(); i++) {
final String _key = ciKeys.get(i);
final CachedData cd = dataMap.get(_key);
if (log.isDebugEnabled()) log.debug("Chunk Key " + _key + "; Value : " + cd);
if (cd == null) continue;
final byte[] val = cd.getData();
if (val == null) {
data = null;
break;
}
final int len = (i == ciKeys.size() - 1) ? ((ci.getLastChunk() == 0 || ci.getLastChunk() > ci
.getChunkSize()) ? ci.getChunkSize() : ci.getLastChunk())
: val.length;
try {
System.arraycopy(val, 0, data, index, len);
} catch (Exception e) {
StringBuilder sb = new StringBuilder();
sb.append("ArrayCopyError - Key : " + _key + "; final data Size : " + data.length
+ "; copy array size : " + len + "; val size : " + val.length
+ "; key index : " + i + "; copy from : " + index + "; ChunkInfo : " + ci + "\n");
for (int j = 0; j < ciKeys.size(); j++) {
final String skey = ciKeys.get(j);
final byte[] sval = dataMap.get(skey).getData();
sb.append(skey + "=" + sval.length + "\n");
}
if (log.isWarnEnabled()) log.warn(sb.toString(), e);
throw e;
}
index += val.length;
}
final boolean checksumPass = checkCRCChecksum(data, ci, hasZF);
if (data != null && checksumPass) {
final CachedData cd = new CachedData(ci.getFlags(), data, Integer.MAX_VALUE);
returnMap.put(ci.getKey(), tc.decode(cd));
} else {
returnMap.put(ci.getKey(), null);
}
}
return returnMap;
});
});
}
private CachedData[] createChunks(CachedData cd, String key) {
final int cSize = chunkSize.get();
if ((key.length() + 3) > cSize) throw new IllegalArgumentException("The chunksize " + cSize
+ " is smaller than the key size. Will not be able to proceed. key size = "
+ key.length());
final int len = cd.getData().length;
/* Per-chunk overhead of the memcached entry format: */
// key size
// + 1 // space
// + 4 // flags (number of characters)
// + 4 // key suffix (number of characters)
// + 2 // \r\n
// + 4 // 2 spaces and 1 \r
// + 48 // header size
// + 8 // CAS
final int overheadSize = key.length() + 71 + 3;
// 71 is the sum of the per-entry overhead above; 3 because we will suffix _00, _01 ... _99
final int actualChunkSize = cSize - overheadSize;
int lastChunkSize = len % actualChunkSize;
final int numOfChunks = len / actualChunkSize + ((lastChunkSize > 0) ? 1 : 0) + 1;
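// numOfChunks counts the metadata entry (index 0) plus the data chunks (indices 1..n).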
final CachedData[] chunkData = new CachedData[numOfChunks];
if (lastChunkSize == 0) lastChunkSize = actualChunkSize;
final long sTime = System.nanoTime();
final Checksum checksum = new CRC32();
checksum.update(cd.getData(), 0, len);
final long checkSumValue = checksum.getValue();
int srcPos = 0;
if (log.isDebugEnabled()) log.debug("Ths size of data is " + len + " ; we will create " + (numOfChunks - 1)
+ " of " + actualChunkSize + " bytes. Checksum : "
+ checkSumValue + "; Checksum Duration : " + (System.nanoTime() - sTime));
chunkData[0] = decodingTranscoder.encode(numOfChunks + ":" + actualChunkSize + ":" + lastChunkSize + ":" + cd
.getFlags() + ":" + checkSumValue);
for (int i = 1; i < numOfChunks; i++) {
int lengthOfArray = actualChunkSize;
if (srcPos + actualChunkSize > len) {
lengthOfArray = len - srcPos;
}
byte[] dest = new byte[actualChunkSize];
System.arraycopy(cd.getData(), srcPos, dest, 0, lengthOfArray);
if (actualChunkSize > lengthOfArray) {
for (int j = lengthOfArray; j < actualChunkSize; j++) {
dest[j] = Character.UNASSIGNED; // zero-byte filler to pad the last chunk
}
}
srcPos += lengthOfArray;
//chunkData[i] = decodingTranscoder.encode(dest);
chunkData[i] = new CachedData(SPECIAL_BYTEARRAY, dest, Integer.MAX_VALUE);
}
EVCacheMetricsFactory.getInstance().getDistributionSummary(EVCacheMetricsFactory.INTERNAL_NUM_CHUNK_SIZE, getTagList()).record(numOfChunks);
EVCacheMetricsFactory.getInstance().getDistributionSummary(EVCacheMetricsFactory.INTERNAL_CHUNK_DATA_SIZE, getTagList()).record(len);
return chunkData;
}
/**
 * Retrieves all the chunks as-is. This is mainly used for debugging.
 *
 * @param key the canonical key whose chunks should be retrieved
 * @return all the chunks retrieved, keyed by chunk key
 * @throws EVCacheReadQueueException
 * @throws EVCacheException
 * @throws Exception
 */
public Map<String, CachedData> getAllChunks(String key) throws EVCacheReadQueueException, EVCacheException, Exception {
try {
final ChunkDetails<Object> cd = getChunkDetails(key);
if(log.isDebugEnabled()) log.debug("Chunkdetails " + cd);
if (cd == null) return null;
if (!cd.isChunked()) {
Map<String, CachedData> rv = new HashMap<String, CachedData>();
rv.put(key, (CachedData) cd.getData());
if(log.isDebugEnabled()) log.debug("Data : " + rv);
return rv;
} else {
final List<String> keys = cd.getChunkKeys();
if(log.isDebugEnabled()) log.debug("Keys - " + keys);
final Map<String, CachedData> dataMap = evcacheMemcachedClient.asyncGetBulk(keys, chunkingTranscoder, null)
.getSome(readTimeout.get().intValue(), TimeUnit.MILLISECONDS, false, false);
if(log.isDebugEnabled()) log.debug("Datamap " + dataMap);
return dataMap;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return null;
}
public long incr(String key, long by, long defaultVal, int timeToLive) throws EVCacheException {
return evcacheMemcachedClient.incr(key, by, defaultVal, timeToLive);
}
public long decr(String key, long by, long defaultVal, int timeToLive) throws EVCacheException {
return evcacheMemcachedClient.decr(key, by, defaultVal, timeToLive);
}
public <T> CompletableFuture<T> getAsync(String key, Transcoder<T> tc) {
if(log.isDebugEnabled()) log.debug("fetching data getAsync {}", key);
return evcacheMemcachedClient
.asyncGet(key, tc, null)
.getAsync(readTimeout.get(), TimeUnit.MILLISECONDS);
}
public <T> T get(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF, boolean chunked) throws Exception {
if (chunked) {
return assembleChunks(key, false, 0, tc, hasZF);
} else {
return evcacheMemcachedClient.asyncGet(key, tc, null).get(readTimeout.get(),
TimeUnit.MILLISECONDS, _throwException, hasZF);
}
}
public <T> T get(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF) throws Exception {
if (!validateNode(key, _throwException, Call.GET)) {
if(ignoreInactiveNodes.get()) {
incrementFailure(EVCacheMetricsFactory.IGNORE_INACTIVE_NODES, Call.GET);
return pool.getEVCacheClientForReadExclude(serverGroup).get(key, tc, _throwException, hasZF, enableChunking.get());
} else {
return null;
}
}
return get(key, tc, _throwException, hasZF, enableChunking.get());
}
public <T> Single<T> get(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF, boolean chunked, Scheduler scheduler) throws Exception {
if (chunked) {
return assembleChunks(key, _throwException, 0, tc, hasZF, scheduler);
} else {
return evcacheMemcachedClient.asyncGet(key, tc, null)
.get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
}
}
public <T> Single<T> get(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF, Scheduler scheduler) {
try {
if (!validateNode(key, _throwException, Call.GET)) {
if(ignoreInactiveNodes.get()) {
incrementFailure(EVCacheMetricsFactory.IGNORE_INACTIVE_NODES, Call.GET);
return pool.getEVCacheClientForReadExclude(serverGroup).get(key, tc, _throwException, hasZF, enableChunking.get(), scheduler);
} else {
return Single.just(null);
}
}
return get(key, tc, _throwException, hasZF, enableChunking.get(), scheduler);
} catch (Throwable e) {
return Single.error(e);
}
}
public <T> T getAndTouch(String key, Transcoder<T> tc, int timeToLive, boolean _throwException, boolean hasZF) throws Exception {
EVCacheMemcachedClient _client = evcacheMemcachedClient;
if (!validateNode(key, _throwException, Call.GET_AND_TOUCH)) {
if(ignoreInactiveNodes.get()) {
incrementFailure(EVCacheMetricsFactory.IGNORE_INACTIVE_NODES, Call.GET_AND_TOUCH);
_client = pool.getEVCacheClientForReadExclude(serverGroup).getEVCacheMemcachedClient();
} else {
return null;
}
}
if (tc == null) tc = (Transcoder<T>) getTranscoder();
final T returnVal;
if (enableChunking.get()) {
return assembleChunks(key, false, 0, tc, hasZF);
} else {
if(ignoreTouch.get()) {
returnVal = _client.asyncGet(key, tc, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
} else {
final CASValue<T> value = _client.asyncGetAndTouch(key, timeToLive, tc).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
returnVal = (value == null) ? null : value.getValue();
}
}
return returnVal;
}
public <T> Single<T> getAndTouch(String key, Transcoder<T> transcoder, int timeToLive, boolean _throwException, boolean hasZF, Scheduler scheduler) {
try {
EVCacheMemcachedClient client = evcacheMemcachedClient;
if (!validateNode(key, _throwException, Call.GET_AND_TOUCH)) {
if(ignoreInactiveNodes.get()) {
incrementFailure(EVCacheMetricsFactory.IGNORE_INACTIVE_NODES, Call.GET_AND_TOUCH);
client = pool.getEVCacheClientForReadExclude(serverGroup).getEVCacheMemcachedClient();
} else {
return null;
}
}
final EVCacheMemcachedClient _client = client;
final Transcoder<T> tc = (transcoder == null) ? (Transcoder<T>) getTranscoder(): transcoder;
if (enableChunking.get()) {
return assembleChunks(key, false, 0, tc, hasZF, scheduler);
} else {
return _client.asyncGetAndTouch(key, timeToLive, tc)
.get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler)
.map(value -> (value == null) ? null : value.getValue());
}
} catch (Throwable e) {
return Single.error(e);
}
}
public <T> Map<String, T> getBulk(Collection<String> _canonicalKeys, Transcoder<T> tc, boolean _throwException,
boolean hasZF) throws Exception {
final Collection<String> canonicalKeys = validateReadQueueSize(_canonicalKeys, Call.BULK);
final Map<String, T> returnVal;
try {
if (tc == null) tc = (Transcoder<T>) getTranscoder();
if (enableChunking.get()) {
returnVal = assembleChunks(_canonicalKeys, tc, hasZF);
} else {
returnVal = evcacheMemcachedClient.asyncGetBulk(canonicalKeys, tc, null)
.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
}
} catch (Exception e) {
if (_throwException) throw e;
return Collections.<String, T> emptyMap();
}
return returnVal;
}
public <T> CompletableFuture<Map<String, T>> getAsyncBulk(Collection<String> _canonicalKeys, Transcoder<T> tc) {
final Collection<String> canonicalKeys = validateReadQueueSize(_canonicalKeys, Call.COMPLETABLE_FUTURE_GET_BULK);
if (tc == null) tc = (Transcoder<T>) getTranscoder();
return evcacheMemcachedClient
.asyncGetBulk(canonicalKeys, tc, null)
.getAsyncSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS);
}
public <T> Single<Map<String, T>> getBulk(Collection<String> _canonicalKeys, final Transcoder<T> transcoder, boolean _throwException,
boolean hasZF, Scheduler scheduler) {
try {
final Collection<String> canonicalKeys = validateReadQueueSize(_canonicalKeys, Call.BULK);
final Transcoder<T> tc = (transcoder == null) ? (Transcoder<T>) getTranscoder() : transcoder;
if (enableChunking.get()) {
return assembleChunks(_canonicalKeys, tc, hasZF, scheduler);
} else {
return evcacheMemcachedClient.asyncGetBulk(canonicalKeys, tc, null)
.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
}
} catch (Throwable e) {
return Single.error(e);
}
}
public <T> Future<Boolean> append(String key, T value) throws Exception {
if (enableChunking.get()) throw new EVCacheException(
"This operation is not supported as chunking is enabled on this EVCacheClient.");
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.APPEND)) return getDefaultFuture();
return evcacheMemcachedClient.append(key, value);
}
public Future<Boolean> set(String key, CachedData value, int timeToLive) throws Exception {
return _set(key, value, timeToLive, null);
}
public Future<Boolean> set(String key, CachedData cd, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
return _set(key, cd, timeToLive, evcacheLatch);
}
@Deprecated
public <T> Future<Boolean> set(String key, T value, int timeToLive) throws Exception {
return set(key, value, timeToLive, null);
}
@Deprecated
public <T> Future<Boolean> set(String key, T value, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
final CachedData cd;
if (value instanceof CachedData) {
cd = (CachedData) value;
} else {
cd = getTranscoder().encode(value);
}
return _set(key, cd, timeToLive, evcacheLatch);
}
private Future<Boolean> _set(String key, CachedData value, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.SET)) {
if (log.isInfoEnabled()) log.info("Node : " + node + " is not active. Failing fast and dropping the write event.");
final ListenableFuture<Boolean, OperationCompletionListener> defaultFuture = (ListenableFuture<Boolean, OperationCompletionListener>) getDefaultFuture();
if (evcacheLatch != null && evcacheLatch instanceof EVCacheLatchImpl && !isInWriteOnly()) ((EVCacheLatchImpl) evcacheLatch).addFuture(defaultFuture);
return defaultFuture;
}
try {
final int dataSize = ((CachedData) value).getData().length;
if (enableChunking.get()) {
if (dataSize > chunkSize.get()) {
final CachedData[] cd = createChunks(value, key);
final int len = cd.length;
final OperationFuture<Boolean>[] futures = new OperationFuture[len];
for (int i = 0; i < cd.length; i++) {
final String prefix = (i < 10) ? "0" : "";
futures[i] = evcacheMemcachedClient.set(key + "_" + prefix + i, timeToLive, cd[i], null, null);
}
// ensure we are deleting the unchunked key if it exists.
// Ignore return value since it may not exist.
evcacheMemcachedClient.delete(key);
return new EVCacheFutures(futures, key, appName, serverGroup, evcacheLatch);
} else {
// delete all the chunks if they exist as the
// data is moving from chunked to unchunked
delete(key);
return evcacheMemcachedClient.set(key, timeToLive, value, null, evcacheLatch);
}
} else {
return evcacheMemcachedClient.set(key, timeToLive, value, null, evcacheLatch);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw e;
}
}
private Boolean shouldHashKey() {
return hashKeyByServerGroup.get();
}
public HashingAlgorithm getHashingAlgorithm() {
if (null == shouldHashKey()) {
// hash key property is not set at the client level
return null;
}
// return NO_HASHING if hashing is explicitly disabled at client level
return shouldHashKey() ? KeyHasher.getHashingAlgorithmFromString(hashingAlgo.get()) : HashingAlgorithm.NO_HASHING;
}
public <T> Future<Boolean> appendOrAdd(String key, CachedData value, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.APPEND_OR_ADD)) {
if (log.isInfoEnabled()) log.info("Node : " + node + " is not active. Failing fast and dropping the write event.");
final ListenableFuture<Boolean, OperationCompletionListener> defaultFuture = (ListenableFuture<Boolean, OperationCompletionListener>) getDefaultFuture();
if (evcacheLatch != null && evcacheLatch instanceof EVCacheLatchImpl && !isInWriteOnly()) ((EVCacheLatchImpl) evcacheLatch).addFuture(defaultFuture);
return defaultFuture;
}
try {
return evcacheMemcachedClient.asyncAppendOrAdd(key, timeToLive, value, evcacheLatch);
} catch (Exception e) {
log.error(e.getMessage(), e);
throw e;
}
}
public Future<Boolean> replace(String key, CachedData cd, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
return _replace(key, cd, timeToLive, evcacheLatch);
}
@Deprecated
public <T> Future<Boolean> replace(String key, T value, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
final CachedData cd;
if (value instanceof CachedData) {
cd = (CachedData) value;
} else {
cd = getTranscoder().encode(value);
}
return _replace(key, cd, timeToLive, evcacheLatch);
}
private Future<Boolean> _replace(String key, CachedData value, int timeToLive, EVCacheLatch evcacheLatch) throws Exception {
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.REPLACE)) {
if (log.isInfoEnabled()) log.info("Node : " + node + " is not active. Failing fast and dropping the replace event.");
final ListenableFuture<Boolean, OperationCompletionListener> defaultFuture = (ListenableFuture<Boolean, OperationCompletionListener>) getDefaultFuture();
if (evcacheLatch != null && evcacheLatch instanceof EVCacheLatchImpl && !isInWriteOnly()) ((EVCacheLatchImpl) evcacheLatch).addFuture(defaultFuture);
return defaultFuture;
}
try {
final int dataSize = ((CachedData) value).getData().length;
if (enableChunking.get() && dataSize > chunkSize.get()) {
final CachedData[] cd = createChunks(value, key);
final int len = cd.length;
final OperationFuture<Boolean>[] futures = new OperationFuture[len];
for (int i = 0; i < cd.length; i++) {
final String prefix = (i < 10) ? "0" : "";
futures[i] = evcacheMemcachedClient.replace(key + "_" + prefix + i, timeToLive, cd[i], null, null);
}
return new EVCacheFutures(futures, key, appName, serverGroup, evcacheLatch);
} else {
return evcacheMemcachedClient.replace(key, timeToLive, value, null, evcacheLatch);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw e;
}
}
private Future<Boolean> _add(String key, int exp, CachedData value, EVCacheLatch latch) throws Exception {
if (enableChunking.get()) throw new EVCacheException("This operation is not supported as chunking is enabled on this EVCacheClient.");
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.ADD)) return getDefaultFuture();
return evcacheMemcachedClient.add(key, exp, value, null, latch);
}
@Deprecated
public <T> Future<Boolean> add(String key, int exp, T value) throws Exception {
final CachedData cd;
if (value instanceof CachedData) {
cd = (CachedData) value;
} else {
cd = getTranscoder().encode(value);
}
return _add(key, exp, cd, null);
}
@Deprecated
public <T> Future<Boolean> add(String key, int exp, T value, Transcoder<T> tc) throws Exception {
final CachedData cd;
if (value instanceof CachedData) {
cd = (CachedData) value;
} else {
if(tc == null) {
cd = getTranscoder().encode(value);
} else {
cd = tc.encode(value);
}
}
return _add(key, exp, cd, null);
}
@Deprecated
public <T> Future<Boolean> add(String key, int exp, T value, final Transcoder<T> tc, EVCacheLatch latch) throws Exception {
final CachedData cd;
if (value instanceof CachedData) {
cd = (CachedData) value;
} else {
if(tc == null) {
cd = getTranscoder().encode(value);
} else {
cd = tc.encode(value);
}
}
return _add(key, exp, cd, latch);
}
public Future<Boolean> add(String key, int exp, CachedData value, EVCacheLatch latch) throws Exception {
return _add(key, exp, value, latch);
}
public <T> Future<Boolean> touch(String key, int timeToLive) throws Exception {
return touch(key, timeToLive, null);
}
public <T> Future<Boolean> touch(String key, int timeToLive, EVCacheLatch latch) throws Exception {
if(ignoreTouch.get()) {
final ListenableFuture<Boolean, OperationCompletionListener> sf = new SuccessFuture();
if (latch != null && latch instanceof EVCacheLatchImpl && !isInWriteOnly()) ((EVCacheLatchImpl) latch).addFuture(sf);
return sf;
}
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.TOUCH)) {
final ListenableFuture<Boolean, OperationCompletionListener> defaultFuture = (ListenableFuture<Boolean, OperationCompletionListener>) getDefaultFuture();
if (latch != null && latch instanceof EVCacheLatchImpl && !isInWriteOnly()) ((EVCacheLatchImpl) latch).addFuture(defaultFuture);
return defaultFuture;
}
if (enableChunking.get()) {
final ChunkDetails<?> cd = getChunkDetails(key);
if (cd.isChunked()) {
final List<String> keys = cd.getChunkKeys();
OperationFuture<Boolean>[] futures = new OperationFuture[keys.size() + 1];
futures[0] = evcacheMemcachedClient.touch(key + "_00", timeToLive, latch);
for (int i = 0; i < keys.size(); i++) {
final String prefix = (i < 10) ? "0" : "";
final String _key = key + "_" + prefix + i;
futures[i + 1] = evcacheMemcachedClient.touch(_key, timeToLive, latch);
}
return new EVCacheFutures(futures, key, appName, serverGroup, latch);
} else {
return evcacheMemcachedClient.touch(key, timeToLive, latch);
}
} else {
return evcacheMemcachedClient.touch(key, timeToLive, latch);
}
}
public <T> Future<T> asyncGet(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF)
throws Exception {
if (enableChunking.get()) throw new EVCacheException(
"This operation is not supported as chunking is enabled on this EVCacheClient.");
if (!validateNode(key, _throwException, Call.ASYNC_GET)) return null;
if (tc == null) tc = (Transcoder<T>) getTranscoder();
return evcacheMemcachedClient.asyncGet(key, tc, null);
}
public Future<Boolean> delete(String key) throws Exception {
return delete(key, null);
}
public Future<Boolean> delete(String key, EVCacheLatch latch) throws Exception {
final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
if (!ensureWriteQueueSize(node, key, Call.DELETE)) {
final ListenableFuture<Boolean, OperationCompletionListener> defaultFuture = (ListenableFuture<Boolean, OperationCompletionListener>) getDefaultFuture();
if (latch != null && latch instanceof EVCacheLatchImpl && !isInWriteOnly()) ((EVCacheLatchImpl) latch).addFuture(defaultFuture);
return defaultFuture;
}
if (enableChunking.get()) {
final ChunkDetails<?> cd = getChunkDetails(key);
if (cd == null) {
                // Paranoid delete: the chunk metadata lookup failed, so delete the first chunk key anyway in case it exists
return evcacheMemcachedClient.delete(key + "_00", latch);
}
if (!cd.isChunked()) {
return evcacheMemcachedClient.delete(key, latch);
} else {
final List<String> keys = cd.getChunkKeys();
OperationFuture<Boolean>[] futures = new OperationFuture[keys.size() + 1];
futures[0] = evcacheMemcachedClient.delete(key + "_00");
for (int i = 0; i < keys.size(); i++) {
futures[i + 1] = evcacheMemcachedClient.delete(keys.get(i), null);
}
return new EVCacheFutures(futures, key, appName, serverGroup, latch);
}
} else {
return evcacheMemcachedClient.delete(key, latch);
}
}
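    // Illustrative delete fan-out (inferred from the logic above): for a chunked
    // value both the first chunk key <key>_00 and every key in getChunkKeys() are
    // deleted, and the returned EVCacheFutures wrapper ties all of those deletes
    // to the supplied latch.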
public boolean removeConnectionObserver() {
try {
boolean removed = evcacheMemcachedClient.removeObserver(connectionObserver);
if (removed) connectionObserver = null;
return removed;
} catch (Exception e) {
return false;
}
}
public boolean shutdown(long timeout, TimeUnit unit) {
if(shutdown) return true;
shutdown = true;
try {
evcacheMemcachedClient.shutdown(timeout, unit);
} catch(Throwable t) {
log.error("Exception while shutting down", t);
}
return true;
}
public EVCacheConnectionObserver getConnectionObserver() {
return this.connectionObserver;
}
public ConnectionFactory getConnectionFactory() {
return connectionFactory;
}
public String getAppName() {
return appName;
}
public String getZone() {
return zone;
}
public int getId() {
return id;
}
public ServerGroup getServerGroup() {
return serverGroup;
}
public String getServerGroupName() {
return (serverGroup == null ? "NA" : serverGroup.getName());
}
public boolean isShutdown() {
return this.shutdown;
}
public boolean isInWriteOnly(){
return pool.isInWriteOnly(getServerGroup());
}
public Map<SocketAddress, Map<String, String>> getStats(String cmd) {
return evcacheMemcachedClient.getStats(cmd);
}
public Map<SocketAddress, String> execCmd(String cmd, String[] ips) {
return evcacheMemcachedClient.execCmd(cmd, ips);
}
public Map<SocketAddress, String> getVersions() {
return evcacheMemcachedClient.getVersions();
}
public Future<Boolean> flush() {
return evcacheMemcachedClient.flush();
}
public Transcoder<Object> getTranscoder() {
return evcacheMemcachedClient.getTranscoder();
}
public ConnectionFactory getEVCacheConnectionFactory() {
return this.connectionFactory;
}
public NodeLocator getNodeLocator() {
return this.evcacheMemcachedClient.getNodeLocator();
}
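    // The two futures below are returned without touching the network:
    // SuccessFuture completes with TRUE (e.g. when touch is ignored) and
    // DefaultFuture completes with FALSE (e.g. when a write is dropped because
    // the node's write queue is full).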
static class SuccessFuture implements ListenableFuture<Boolean, OperationCompletionListener> {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return true;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return true;
}
@Override
public Boolean get() throws InterruptedException, ExecutionException {
return Boolean.TRUE;
}
@Override
public Boolean get(long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
return Boolean.TRUE;
}
@Override
public Future<Boolean> addListener(OperationCompletionListener listener) {
return this;
}
@Override
public Future<Boolean> removeListener(OperationCompletionListener listener) {
return this;
}
}
static class DefaultFuture implements ListenableFuture<Boolean, OperationCompletionListener> {
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return true;
}
@Override
public Boolean get() throws InterruptedException, ExecutionException {
return Boolean.FALSE;
}
@Override
public Boolean get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException,
TimeoutException {
return Boolean.FALSE;
}
@Override
public Future<Boolean> addListener(OperationCompletionListener listener) {
return this;
}
@Override
public Future<Boolean> removeListener(OperationCompletionListener listener) {
return this;
}
}
private Future<Boolean> getDefaultFuture() {
final Future<Boolean> defaultFuture = new DefaultFuture();
return defaultFuture;
}
public String toString() {
return "App : " + appName + "; Zone : " + zone + "; Id : " + id + "; " + serverGroup.toString() + "; Nodes : "
+ memcachedNodesInZone.toString();
}
public EVCacheMemcachedClient getEVCacheMemcachedClient() {
return evcacheMemcachedClient;
}
public List<InetSocketAddress> getMemcachedNodesInZone() {
return memcachedNodesInZone;
}
public int getMaxWriteQueueSize() {
return maxWriteQueueSize;
}
public Property<Integer> getReadTimeout() {
return readTimeout;
}
public Property<Integer> getBulkReadTimeout() {
return bulkReadTimeout;
}
public Property<Integer> getMaxReadQueueSize() {
return maxReadQueueSize;
}
public Property<Boolean> getEnableChunking() {
return enableChunking;
}
public Property<Integer> getChunkSize() {
return chunkSize;
}
public ChunkTranscoder getChunkingTranscoder() {
return chunkingTranscoder;
}
public EVCacheSerializingTranscoder getDecodingTranscoder() {
return decodingTranscoder;
}
public EVCacheClientPool getPool() {
return pool;
}
public EVCacheServerGroupConfig getEVCacheConfig() {
return config;
}
static class ChunkDetails<T> {
final List<String> chunkKeys;
final ChunkInfo chunkInfo;
final boolean chunked;
final T data;
public ChunkDetails(List<String> chunkKeys, ChunkInfo chunkInfo, boolean chunked, T data) {
super();
this.chunkKeys = chunkKeys;
this.chunkInfo = chunkInfo;
this.chunked = chunked;
this.data = data;
}
public List<String> getChunkKeys() {
return chunkKeys;
}
public ChunkInfo getChunkInfo() {
return chunkInfo;
}
public boolean isChunked() {
return chunked;
}
public T getData() {
return data;
}
@Override
public String toString() {
return "ChunkDetails [chunkKeys=" + chunkKeys + ", chunkInfo=" + chunkInfo + ", chunked=" + chunked
+ ", data=" + data + "]";
}
}
static class ChunkInfo {
final int chunks;
final int chunkSize;
final int lastChunk;
final int flags;
final String key;
final long checksum;
public ChunkInfo(int chunks, int chunkSize, int lastChunk, int flags, String firstKey, long checksum) {
super();
this.chunks = chunks;
this.chunkSize = chunkSize;
this.lastChunk = lastChunk;
this.flags = flags;
this.key = firstKey;
this.checksum = checksum;
}
public int getChunks() {
return chunks;
}
public int getChunkSize() {
return chunkSize;
}
public int getLastChunk() {
return lastChunk;
}
public int getFlags() {
return flags;
}
public String getKey() {
return key;
}
public long getChecksum() {
return checksum;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("{\"chunks\":\"");
builder.append(chunks);
builder.append("\",\"chunkSize\":\"");
builder.append(chunkSize);
builder.append("\",\"lastChunk\":\"");
builder.append(lastChunk);
builder.append("\",\"flags\":\"");
builder.append(flags);
builder.append("\",\"key\":\"");
builder.append(key);
builder.append("\",\"checksum\":\"");
builder.append(checksum);
builder.append("\"}");
return builder.toString();
}
}
public int getWriteQueueLength() {
final Collection<MemcachedNode> allNodes = evcacheMemcachedClient.getNodeLocator().getAll();
int size = 0;
for(MemcachedNode node : allNodes) {
if(node instanceof EVCacheNode) {
size += ((EVCacheNode)node).getWriteQueueSize();
}
}
return size;
}
public int getReadQueueLength() {
final Collection<MemcachedNode> allNodes = evcacheMemcachedClient.getNodeLocator().getAll();
int size = 0;
for(MemcachedNode node : allNodes) {
if(node instanceof EVCacheNode) {
size += ((EVCacheNode)node).getReadQueueSize();
}
}
return size;
}
public List<Tag> getTagList() {
return tags;
}
public Counter getOperationCounter() {
return operationsCounter;
}
    /**
     * Return the keys up to the specified limit. Each key is the canonicalized (or hashed) key.<br>
     * <b>The keys are read into memory, so make sure you have enough memory to hold the specified number of keys.</b>
     * @param limit - the number of keys to be fetched from each memcached node.
     * @return - the List of keys.
     */
public List<String> getAllKeys(final int limit) {
final List<String> keyList = new ArrayList<String>(limit);
byte[] array = new byte[EVCacheConfig.getInstance().getPropertyRepository().get(appName + ".all.keys.reader.buffer.size.bytes", Integer.class).orElse(4*1024*1024).get()];
final int waitInSec = EVCacheConfig.getInstance().getPropertyRepository().get(appName + ".all.keys.reader.wait.duration.sec", Integer.class).orElse(60).get();
for(InetSocketAddress address : memcachedNodesInZone) {
Socket socket = null;
PrintWriter printWriter = null;
BufferedInputStream bufferedReader = null;
try {
socket = new Socket(address.getHostName(), address.getPort());
printWriter = new PrintWriter(socket.getOutputStream(), true);
printWriter.print("lru_crawler metadump all \r\n");
printWriter.print("quit \r\n");
printWriter.flush();
bufferedReader = new BufferedInputStream(socket.getInputStream());
while(isDataAvailableForRead(bufferedReader, waitInSec, TimeUnit.SECONDS, socket)) {
int read = bufferedReader.read(array);
if (log.isDebugEnabled()) log.debug("Number of bytes read = " +read);
if(read > 0) {
StringBuilder b = new StringBuilder();
boolean start = true;
for (int i = 0; i < read; i++) {
if(array[i] == ' ') {
start = false;
if(b.length() > 4) keyList.add(URLDecoder.decode(b.substring(4), StandardCharsets.UTF_8.name()));
b = new StringBuilder();
}
if(start) b.append((char)array[i]);
if(array[i] == '\n') {
start = true;
}
if(keyList.size() >= limit) {
if (log.isDebugEnabled()) log.debug("Record Limit reached. Will break and return");
return keyList;
}
}
} else if (read < 0 ){
break;
}
}
} catch (Exception e) {
if(socket != null) {
try {
socket.close();
} catch (IOException e1) {
log.error("Error closing socket", e1);
}
}
log.error("Exception", e);
}
finally {
if(bufferedReader != null) {
try {
bufferedReader.close();
} catch (IOException e1) {
log.error("Error closing bufferedReader", e1);
}
}
if(printWriter != null) {
try {
printWriter.close();
} catch (Exception e1) {
log.error("Error closing socket", e1);
}
}
if(socket != null) {
try {
socket.close();
} catch (IOException e) {
if (log.isDebugEnabled()) log.debug("Error closing socket", e);
}
}
}
}
return keyList;
}
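    // Illustrative usage (hypothetical caller side code): dump up to 1000 keys
    // from every memcached node served by this client.
    //
    //   for (String key : client.getAllKeys(1000)) {
    //       System.out.println(key);
    //   }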
private boolean isDataAvailableForRead(BufferedInputStream bufferedReader, long timeout, TimeUnit unit, Socket socket) throws IOException {
long expiry = System.currentTimeMillis() + unit.toMillis(timeout);
int tryCount = 0;
while(expiry > System.currentTimeMillis()) {
if(log.isDebugEnabled()) log.debug("For Socket " + socket + " number of bytes available = " + bufferedReader.available() + " and try number is " + tryCount);
if(bufferedReader.available() > 0) {
return true;
}
if(tryCount++ < 5) {
try {
if(log.isDebugEnabled()) log.debug("Sleep for 100 msec");
Thread.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore the interrupt flag
                }
} else {
return false;
}
}
return false;
}
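    // Note: the poll above tolerates at most 5 consecutive empty reads (about
    // 500 msec at 100 msec per sleep) before giving up on a node, bounded overall
    // by the configured wait duration.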
public EVCacheItemMetaData metaDebug(String key) throws Exception {
final EVCacheItemMetaData obj = evcacheMemcachedClient.metaDebug(key);
if(log.isDebugEnabled()) log.debug("EVCacheItemMetaData : " + obj);
return obj;
}
public <T> EVCacheItem<T> metaGet(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF) throws Exception {
final EVCacheItem<T> obj = evcacheMemcachedClient.asyncMetaGet(key, tc, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
if (log.isDebugEnabled()) log.debug("EVCacheItem : " + obj);
return obj;
}
public void addTag(String tagName, String tagValue) {
final Tag tag = new BasicTag(tagName, tagValue);
if(tags.contains(tag)) return;
final List<Tag> tagList = new ArrayList<Tag>(tags);
tagList.add(tag);
        this.tags = Collections.<Tag>unmodifiableList(tagList);
}
}
| 803 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/ServerGroup.java
|
package com.netflix.evcache.pool;
public class ServerGroup implements Comparable<ServerGroup> {
private final String zone;
private final String name;
public ServerGroup(String zone, String name) {
super();
this.zone = zone;
this.name = name;
}
public String getZone() {
return zone;
}
public String getName() {
return name;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((zone == null) ? 0 : zone.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (!(obj instanceof ServerGroup)) return false;
ServerGroup other = (ServerGroup) obj;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
if (zone == null) {
if (other.zone != null)
return false;
} else if (!zone.equals(other.zone))
return false;
return true;
}
@Override
public String toString() {
return "Server Group [zone=" + zone + (name.equals(zone) ? "" : ", name=" + name) + "]";
}
@Override
public int compareTo(ServerGroup o) {
return toString().compareTo(o.toString());
}
}
| 804 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheServerGroupConfig.java
|
package com.netflix.evcache.pool;
import java.net.InetSocketAddress;
import java.util.Set;
public class EVCacheServerGroupConfig {
private final ServerGroup serverGroup;
private final Set<InetSocketAddress> inetSocketAddress;
public EVCacheServerGroupConfig(ServerGroup serverGroup, Set<InetSocketAddress> inetSocketAddress) {
super();
this.serverGroup = serverGroup;
this.inetSocketAddress = inetSocketAddress;
}
public ServerGroup getServerGroup() {
return serverGroup;
}
public Set<InetSocketAddress> getInetSocketAddress() {
return inetSocketAddress;
}
@Override
public String toString() {
        return "EVCacheServerGroupConfig [InetSocketAddress=" + inetSocketAddress + "]";
}
}
| 805 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheClientPoolMBean.java
|
package com.netflix.evcache.pool;
import java.util.Map;
public interface EVCacheClientPoolMBean {
int getInstanceCount();
Map<String, String> getInstancesByZone();
Map<String, Integer> getInstanceCountByZone();
Map<String, String> getReadZones();
Map<String, Integer> getReadInstanceCountByZone();
Map<String, String> getWriteZones();
Map<String, Integer> getWriteInstanceCountByZone();
String getFallbackServerGroup();
Map<String, String> getReadServerGroupByZone();
String getLocalServerGroupCircularIterator();
void refreshPool();
String getPoolDetails();
String getEVCacheWriteClientsCircularIterator();
}
| 806 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/SimpleNodeListProvider.java
|
package com.netflix.evcache.pool;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.TimeUnit;
import com.netflix.archaius.api.PropertyRepository;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.JSONTokener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.net.InetAddresses;
import com.netflix.archaius.api.Property;
import com.netflix.evcache.util.EVCacheConfig;
import com.netflix.spectator.api.Tag;
import com.netflix.evcache.pool.EVCacheClientPool;
public class SimpleNodeListProvider implements EVCacheNodeList {
private static final Logger log = LoggerFactory.getLogger(EVCacheClientPool.class);
private static final String EUREKA_TIMEOUT = "evcache.eureka.timeout";
private String currentNodeList = "";
private final int timeout;
private String region = null;
private String env = null;
public SimpleNodeListProvider() {
final String timeoutStr = System.getProperty(EUREKA_TIMEOUT);
this.timeout = (timeoutStr != null) ? Integer.parseInt(timeoutStr) : 5000;
final String sysEnv = System.getenv("NETFLIX_ENVIRONMENT");
if(sysEnv != null) {
env = sysEnv;
} else {
            String propEnv = System.getProperty("@environment");
            if(propEnv == null) propEnv = System.getProperty("eureka.environment");
            if(propEnv == null) propEnv = System.getProperty("netflix.environment");
            env = propEnv;
}
final String sysRegion = System.getenv("EC2_REGION");
if(sysRegion != null) {
region = sysRegion;
} else {
            String propRegion = System.getProperty("@region");
            if(propRegion == null) propRegion = System.getProperty("eureka.region");
            if(propRegion == null) propRegion = System.getProperty("netflix.region");
            region = propRegion;
}
}
    /**
     * Bootstraps the node list from a System Property of the format
     *
     * <EVCACHE_APP>-NODES=setname0=instance01:port,instance02:port,
     * instance03:port;setname1=instance11:port,instance12:port,instance13:port;
     * setname2=instance21:port,instance22:port,instance23:port
     *
     */
@Override
public Map<ServerGroup, EVCacheServerGroupConfig> discoverInstances(String appName) throws IOException {
final String propertyName = appName + "-NODES";
final String nodeListString = EVCacheConfig.getInstance().getPropertyRepository().get(propertyName, String.class).orElse("").get();
if (log.isDebugEnabled()) log.debug("List of Nodes = " + nodeListString);
if(nodeListString != null && nodeListString.length() > 0) return bootstrapFromSystemProperty(nodeListString);
if(env != null && region != null) return bootstrapFromEureka(appName);
return Collections.<ServerGroup, EVCacheServerGroupConfig> emptyMap();
}
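    // Illustrative example (hypothetical host names): setting the system property
    //   EVCACHE_APP-NODES=grp0=host1:11211,host2:11211;grp1=host3:11211
    // bootstraps two server groups ("grp0" and "grp1") with the listed memcached
    // instances, bypassing Eureka discovery.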
    /**
     * Netflix specific implementation that loads the node list for the given app
     * from the Eureka read-only REST endpoint.
     * @param appName the EVCache app whose instances should be discovered
     * @return the server groups and their instances, or an empty map on failure
     * @throws IOException
     */
private Map<ServerGroup, EVCacheServerGroupConfig> bootstrapFromEureka(String appName) throws IOException {
if(env == null || region == null) return Collections.<ServerGroup, EVCacheServerGroupConfig> emptyMap();
final String url = "http://discoveryreadonly." + region + ".dyn" + env + ".netflix.net:7001/v2/apps/" + appName;
final CloseableHttpClient httpclient = HttpClients.createDefault();
final long start = System.currentTimeMillis();
PropertyRepository props = EVCacheConfig.getInstance().getPropertyRepository();
CloseableHttpResponse httpResponse = null;
try {
final RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(timeout).setConnectTimeout(timeout).build();
HttpGet httpGet = new HttpGet(url);
httpGet.addHeader("Accept", "application/json");
httpGet.setConfig(requestConfig);
httpResponse = httpclient.execute(httpGet);
final int statusCode = httpResponse.getStatusLine().getStatusCode();
if (!(statusCode >= 200 && statusCode < 300)) {
log.error("Status Code : " + statusCode + " for url " + url);
return Collections.<ServerGroup, EVCacheServerGroupConfig> emptyMap();
}
final InputStreamReader in = new InputStreamReader(httpResponse.getEntity().getContent(), Charset.defaultCharset());
final JSONTokener js = new JSONTokener(in);
final JSONObject jsonObj = new JSONObject(js);
final JSONObject application = jsonObj.getJSONObject("application");
final JSONArray instances = application.getJSONArray("instance");
final Map<ServerGroup, EVCacheServerGroupConfig> serverGroupMap = new HashMap<ServerGroup, EVCacheServerGroupConfig>();
final int securePort = Integer.parseInt(props.get("evcache.secure.port", String.class)
.orElse(EVCacheClientPool.DEFAULT_SECURE_PORT).get());
for(int i = 0; i < instances.length(); i++) {
final JSONObject instanceObj = instances.getJSONObject(i);
final JSONObject metadataObj = instanceObj.getJSONObject("dataCenterInfo").getJSONObject("metadata");
final String asgName = instanceObj.getString("asgName");
final Property<Boolean> asgEnabled = props.get(asgName + ".enabled", Boolean.class).orElse(true);
final boolean isSecure = props.get(asgName + ".use.secure", Boolean.class)
.orElseGet(appName + ".use.secure")
.orElseGet("evcache.use.secure")
.orElse(false).get();
if (!asgEnabled.get()) {
if(log.isDebugEnabled()) log.debug("ASG " + asgName + " is disabled so ignoring it");
continue;
}
final String zone = metadataObj.getString("availability-zone");
final ServerGroup rSet = new ServerGroup(zone, asgName);
final String localIp = metadataObj.getString("local-ipv4");
final JSONObject instanceMetadataObj = instanceObj.getJSONObject("metadata");
final String evcachePortString = instanceMetadataObj.optString("evcache.port",
EVCacheClientPool.DEFAULT_PORT);
final int evcachePort = Integer.parseInt(evcachePortString);
final int port = isSecure ? securePort : evcachePort;
EVCacheServerGroupConfig config = serverGroupMap.get(rSet);
if(config == null) {
config = new EVCacheServerGroupConfig(rSet, new HashSet<InetSocketAddress>());
serverGroupMap.put(rSet, config);
// final ArrayList<Tag> tags = new ArrayList<Tag>(2);
// tags.add(new BasicTag(EVCacheMetricsFactory.CACHE, appName));
// tags.add(new BasicTag(EVCacheMetricsFactory.SERVERGROUP, rSet.getName()));
// EVCacheMetricsFactory.getInstance().getLongGauge(EVCacheMetricsFactory.CONFIG, tags).set(Long.valueOf(port));
}
final InetAddress add = InetAddresses.forString(localIp);
final InetAddress inetAddress = InetAddress.getByAddress(localIp, add.getAddress());
final InetSocketAddress address = new InetSocketAddress(inetAddress, port);
config.getInetSocketAddress().add(address);
}
if (log.isDebugEnabled()) log.debug("Returning : " + serverGroupMap);
return serverGroupMap;
} catch (Exception e) {
if (log.isDebugEnabled()) log.debug("URL : " + url + "; Timeout " + timeout, e);
} finally {
if (httpResponse != null) {
try {
httpResponse.close();
} catch (IOException e) {
}
}
final List<Tag> tagList = new ArrayList<Tag>(2);
EVCacheMetricsFactory.getInstance().addAppNameTags(tagList, appName);
if (log.isDebugEnabled()) log.debug("Total Time to execute " + url + " " + (System.currentTimeMillis() - start) + " msec.");
EVCacheMetricsFactory.getInstance().getPercentileTimer(EVCacheMetricsFactory.INTERNAL_BOOTSTRAP_EUREKA, tagList, Duration.ofMillis(100)).record(System.currentTimeMillis() - start, TimeUnit.MILLISECONDS);
}
return Collections.<ServerGroup, EVCacheServerGroupConfig> emptyMap();
}
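    // The Eureka payload parsed above has roughly this (abbreviated) shape:
    // {"application":{"instance":[{"asgName":"...",
    //   "dataCenterInfo":{"metadata":{"availability-zone":"...","local-ipv4":"..."}},
    //   "metadata":{"evcache.port":"11211"}}]}}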
private Map<ServerGroup, EVCacheServerGroupConfig> bootstrapFromSystemProperty(String nodeListString ) throws IOException {
final Map<ServerGroup, EVCacheServerGroupConfig> instancesSpecific = new HashMap<ServerGroup,EVCacheServerGroupConfig>();
final StringTokenizer setTokenizer = new StringTokenizer(nodeListString, ";");
while (setTokenizer.hasMoreTokens()) {
final String token = setTokenizer.nextToken();
final StringTokenizer replicaSetTokenizer = new StringTokenizer(token, "=");
while (replicaSetTokenizer.hasMoreTokens()) {
final String replicaSetToken = replicaSetTokenizer.nextToken();
final String instanceToken = replicaSetTokenizer.nextToken();
final StringTokenizer instanceTokenizer = new StringTokenizer(instanceToken, ",");
final Set<InetSocketAddress> instanceList = new HashSet<InetSocketAddress>();
final ServerGroup rSet = new ServerGroup(replicaSetToken, replicaSetToken);
final EVCacheServerGroupConfig config = new EVCacheServerGroupConfig(rSet, instanceList);
instancesSpecific.put(rSet, config);
while (instanceTokenizer.hasMoreTokens()) {
final String instance = instanceTokenizer.nextToken();
int index = instance.indexOf(':');
String host = instance.substring(0, index);
String port = instance.substring(index + 1);
int ind = host.indexOf('/');
if (ind == -1) {
final InetAddress add = InetAddress.getByName(host);
instanceList.add(new InetSocketAddress(add, Integer.parseInt(port)));
} else {
final String hostName = host.substring(0, ind);
final String localIp = host.substring(ind + 1);
final InetAddress add = InetAddresses.forString(localIp);
final InetAddress inetAddress = InetAddress.getByAddress(hostName, add.getAddress());
instanceList.add(new InetSocketAddress(inetAddress, Integer.parseInt(port)));
}
}
}
}
currentNodeList = nodeListString;
        if(log.isDebugEnabled()) log.debug("List by ServerGroup : " + instancesSpecific);
return instancesSpecific;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("{\"Current Node List\":\"");
builder.append(currentNodeList);
        builder.append("\"}");
return builder.toString();
}
}
| 807 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/EVCacheValue.java
|
package com.netflix.evcache.pool;
import java.io.Serializable;
import java.util.Arrays;
public class EVCacheValue implements Serializable {
/**
*
*/
private static final long serialVersionUID = 3182483105524224448L;
private final String key;
private final byte[] value;
private final int flags;
private final long ttl;
private final long createTime;
public EVCacheValue(String key, byte[] value, int flags, long ttl, long createTime) {
super();
this.key = key;
this.value = value;
this.flags = flags;
this.ttl = ttl;
this.createTime = createTime;
}
public String getKey() {
return key;
}
public byte[] getValue() {
return value;
}
public int getFlags() {
return flags;
}
public long getTTL() {
return ttl;
}
public long getCreateTimeUTC() {
return createTime;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (createTime ^ (createTime >>> 32));
result = prime * result + ((key == null) ? 0 : key.hashCode());
result = prime * result + (int) (ttl ^ (ttl >>> 32));
result = prime * result + (int) (flags);
result = prime * result + Arrays.hashCode(value);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
EVCacheValue other = (EVCacheValue) obj;
if (createTime != other.createTime)
return false;
if (key == null) {
if (other.key != null)
return false;
} else if (!key.equals(other.key))
return false;
if (flags != other.flags)
return false;
if (ttl != other.ttl)
return false;
if (!Arrays.equals(value, other.value))
return false;
return true;
}
@Override
public String toString() {
return "EVCacheValue [key=" + key + ", value=" + Arrays.toString(value) + ", flags=" + flags + ", ttl=" + ttl + ", createTime="
+ createTime + "]";
}
}
| 808 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/observer/EVCacheConnectionObserverMBean.java
|
package com.netflix.evcache.pool.observer;
import java.net.SocketAddress;
import java.util.Set;
public interface EVCacheConnectionObserverMBean {
int getActiveServerCount();
Set<SocketAddress> getActiveServerNames();
int getInActiveServerCount();
Set<SocketAddress> getInActiveServerNames();
long getLostCount();
long getConnectCount();
}
| 809 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/pool/observer/EVCacheConnectionObserver.java
|
package com.netflix.evcache.pool.observer;
import java.lang.management.ManagementFactory;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.evcache.pool.EVCacheClient;
import net.spy.memcached.ConnectionObserver;
public class EVCacheConnectionObserver implements ConnectionObserver, EVCacheConnectionObserverMBean {
private static final Logger log = LoggerFactory.getLogger(EVCacheConnectionObserver.class);
private final EVCacheClient client;
private long lostCount = 0;
private long connectCount = 0;
private final Set<SocketAddress> evCacheActiveSet;
private final Set<SocketAddress> evCacheInActiveSet;
private final Map<InetSocketAddress, Long> evCacheActiveStringSet;
private final Map<InetSocketAddress, Long> evCacheInActiveStringSet;
// private final Counter connectCounter, connLostCounter;
public EVCacheConnectionObserver(EVCacheClient client) {
this.client = client;
this.evCacheActiveSet = Collections.newSetFromMap(new ConcurrentHashMap<SocketAddress, Boolean>());
this.evCacheInActiveSet = Collections.newSetFromMap(new ConcurrentHashMap<SocketAddress, Boolean>());
this.evCacheActiveStringSet = new ConcurrentHashMap<InetSocketAddress, Long>();
this.evCacheInActiveStringSet = new ConcurrentHashMap<InetSocketAddress, Long>();
// final ArrayList<Tag> tags = new ArrayList<Tag>(client.getTagList().size() + 3);
// tags.addAll(client.getTagList());
// tags.add(new BasicTag(EVCacheMetricsFactory.CONFIG_NAME, EVCacheMetricsFactory.CONNECT ));
// connectCounter = EVCacheMetricsFactory.getInstance().getCounter(EVCacheMetricsFactory.CONFIG, tags);
//
// tags.clear();
// tags.addAll(client.getTagList());
// tags.add(new BasicTag(EVCacheMetricsFactory.CONFIG_NAME, EVCacheMetricsFactory.DISCONNECT ));
// connLostCounter = EVCacheMetricsFactory.getInstance().getCounter(EVCacheMetricsFactory.CONFIG, tags);
setupMonitoring(false);
}
public void connectionEstablished(SocketAddress sa, int reconnectCount) {
final String address = sa.toString();
evCacheActiveSet.add(sa);
evCacheInActiveSet.remove(sa);
final InetSocketAddress inetAdd = (InetSocketAddress) sa;
evCacheActiveStringSet.put(inetAdd, Long.valueOf(System.currentTimeMillis()));
evCacheInActiveStringSet.remove(inetAdd);
if (log.isDebugEnabled()) log.debug(client.getAppName() + ":CONNECTION ESTABLISHED : To " + address + " was established after " + reconnectCount + " retries");
if(log.isTraceEnabled()) log.trace("Stack", new Exception());
// connectCounter.increment();
connectCount++;
}
public void connectionLost(SocketAddress sa) {
final String address = sa.toString();
evCacheInActiveSet.add(sa);
evCacheActiveSet.remove(sa);
final InetSocketAddress inetAdd = (InetSocketAddress) sa;
evCacheInActiveStringSet.put(inetAdd, Long.valueOf(System.currentTimeMillis()));
evCacheActiveStringSet.remove(inetAdd);
if (log.isDebugEnabled()) log.debug(client.getAppName() + ":CONNECTION LOST : To " + address);
if(log.isTraceEnabled()) log.trace("Stack", new Exception());
lostCount++;
// connLostCounter.increment();
}
public int getActiveServerCount() {
return evCacheActiveSet.size();
}
public Set<SocketAddress> getActiveServerNames() {
return evCacheActiveSet;
}
public int getInActiveServerCount() {
return evCacheInActiveSet.size();
}
public Set<SocketAddress> getInActiveServerNames() {
return evCacheInActiveSet;
}
public long getLostCount() {
return lostCount;
}
public long getConnectCount() {
return connectCount;
}
public Map<InetSocketAddress, Long> getInActiveServers() {
return evCacheInActiveStringSet;
}
public Map<InetSocketAddress, Long> getActiveServers() {
return evCacheActiveStringSet;
}
private void setupMonitoring(boolean shutdown) {
try {
final ObjectName mBeanName = ObjectName.getInstance("com.netflix.evcache:Group=" + client.getAppName()
+ ",SubGroup=pool,SubSubGroup=" + client.getServerGroupName()+ ",SubSubSubGroup=" + client.getId());
final MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
if (mbeanServer.isRegistered(mBeanName)) {
if (log.isDebugEnabled()) log.debug("MBEAN with name " + mBeanName
+ " has been registered. Will unregister the previous instance and register a new one.");
mbeanServer.unregisterMBean(mBeanName);
}
if (!shutdown) {
mbeanServer.registerMBean(this, mBeanName);
}
} catch (Exception e) {
if (log.isWarnEnabled()) log.warn(e.getMessage(), e);
}
}
private void unRegisterInActiveNodes() {
try {
for (SocketAddress sa : evCacheInActiveSet) {
final ObjectName mBeanName = ObjectName.getInstance("com.netflix.evcache:Group=" + client.getAppName()
+ ",SubGroup=pool" + ",SubSubGroup=" + client.getServerGroupName() + ",SubSubSubGroup=" + client.getId()
+ ",SubSubSubSubGroup=" + ((InetSocketAddress) sa).getHostName());
final MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
if (mbeanServer.isRegistered(mBeanName)) {
if (log.isDebugEnabled()) log.debug("MBEAN with name " + mBeanName
+ " has been registered. Will unregister the previous instance and register a new one.");
mbeanServer.unregisterMBean(mBeanName);
}
}
} catch (Exception e) {
if (log.isWarnEnabled()) log.warn(e.getMessage(), e);
}
}
public void shutdown() {
unRegisterInActiveNodes();
setupMonitoring(true);
}
public String toString() {
return "EVCacheConnectionObserver ["
+ "EVCacheClient=" + client
+ ", evCacheActiveSet=" + evCacheActiveSet
+ ", evCacheInActiveSet=" + evCacheInActiveSet
+ ", evCacheActiveStringSet=" + evCacheActiveStringSet
+ ", evCacheInActiveStringSet=" + evCacheInActiveStringSet
+ "]";
}
public String getAppName() {
return client.getAppName();
}
public String getServerGroup() {
return client.getServerGroup().toString();
}
public int getId() {
return client.getId();
}
public EVCacheClient getClient() {
return client;
}
}
| 810 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/event/EVCacheEvent.java
|
package com.netflix.evcache.event;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import com.netflix.evcache.EVCache.Call;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import com.netflix.evcache.EVCacheKey;
import com.netflix.evcache.pool.EVCacheClient;
import com.netflix.evcache.pool.EVCacheClientPool;
import net.spy.memcached.CachedData;
import net.spy.memcached.MemcachedNode;
public class EVCacheEvent {
public static final String CLIENTS = "clients";
private final Call call;
private final String appName;
private final String cacheName;
private final EVCacheClientPool pool;
private final long startTime;
private long endTime = 0;
private String status = EVCacheMetricsFactory.SUCCESS;
private Collection<EVCacheClient> clients = null;
private Collection<EVCacheKey> evcKeys = null;
private int ttl = 0;
private CachedData cachedData = null;
private Map<Object, Object> data;
public EVCacheEvent(Call call, String appName, String cacheName, EVCacheClientPool pool) {
super();
this.call = call;
this.appName = appName;
this.cacheName = cacheName;
this.pool = pool;
this.startTime = System.currentTimeMillis();
}
public Call getCall() {
return call;
}
public String getAppName() {
return appName;
}
public String getCacheName() {
return cacheName;
}
public EVCacheClientPool getEVCacheClientPool() {
return pool;
}
public Collection<EVCacheKey> getEVCacheKeys() {
return evcKeys;
}
public void setEVCacheKeys(Collection<EVCacheKey> evcacheKeys) {
this.evcKeys = evcacheKeys;
}
public int getTTL() {
return ttl;
}
public void setTTL(int ttl) {
this.ttl = ttl;
}
public CachedData getCachedData() {
return cachedData;
}
public void setCachedData(CachedData cachedData) {
this.cachedData = cachedData;
}
public Collection<EVCacheClient> getClients() {
return clients;
}
public void setClients(Collection<EVCacheClient> clients) {
this.clients = clients;
}
public void setAttribute(Object key, Object value) {
if (data == null) data = new HashMap<Object, Object>();
data.put(key, value);
}
public Object getAttribute(Object key) {
if (data == null) return null;
return data.get(key);
}
public void setEndTime(long endTime) {
this.endTime = endTime;
}
public long getEndTime() {
return endTime;
}
public void setStatus(String status) {
this.status = status;
}
public String getStatus() {
return status;
}
    /**
     * Returns the duration of the call in milliseconds if available, else -1.
*/
public long getDurationInMillis() {
if(endTime == 0) return -1;
return endTime - startTime;
}
@Override
public int hashCode() {
return evcKeys.hashCode();
}
/**
* @deprecated replaced by {@link #getEVCacheKeys()}
*/
@Deprecated
public Collection<String> getKeys() {
if(evcKeys == null || evcKeys.size() == 0) return Collections.<String>emptyList();
final Collection<String> keyList = new ArrayList<String>(evcKeys.size());
for(EVCacheKey key : evcKeys) {
keyList.add(key.getKey());
}
return keyList;
}
/**
* @deprecated replaced by {@link #setEVCacheKeys(Collection)}
*/
@Deprecated
public void setKeys(Collection<String> keys) {
}
/**
* @deprecated replaced by {@link #getEVCacheKeys()}
*/
@Deprecated
public Collection<String> getCanonicalKeys() {
if(evcKeys == null || evcKeys.size() == 0) return Collections.<String>emptyList();
final Collection<String> keyList = new ArrayList<String>(evcKeys.size());
for(EVCacheKey key : evcKeys) {
keyList.add(key.getCanonicalKey());
}
return keyList;
}
public Collection<MemcachedNode> getMemcachedNode(EVCacheKey evckey) {
final Collection<MemcachedNode> nodeList = new ArrayList<MemcachedNode>(clients.size());
for(EVCacheClient client : clients) {
String key = evckey.getDerivedKey(client.isDuetClient(), client.getHashingAlgorithm(), client.shouldEncodeHashKey(), client.getMaxDigestBytes(), client.getMaxHashLength(), client.getBaseEncoder());
nodeList.add(client.getNodeLocator().getPrimary(key));
}
return nodeList;
}
/**
* @deprecated replaced by {@link #setEVCacheKeys(Collection)}
*/
    @Deprecated
    public void setCanonicalKeys(Collection<String> canonicalKeys) {
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
EVCacheEvent other = (EVCacheEvent) obj;
if (appName == null) {
if (other.appName != null)
return false;
} else if (!appName.equals(other.appName))
return false;
if (cacheName == null) {
if (other.cacheName != null)
return false;
} else if (!cacheName.equals(other.cacheName))
return false;
if (call != other.call)
return false;
if (evcKeys == null) {
if (other.evcKeys != null)
return false;
} else if (!evcKeys.equals(other.evcKeys))
return false;
return true;
}
public long getStartTime() {
return this.startTime;
}
@Override
public String toString() {
return "EVCacheEvent [call=" + call + ", appName=" + appName + ", cacheName=" + cacheName + ", Num of Clients="
+ clients.size() + ", evcKeys=" + evcKeys + ", ttl=" + ttl + ", event Time=" + (new Date(startTime)).toString()
+ ", cachedData=" + (cachedData != null ? "[ Flags : " + cachedData.getFlags() + "; Data Array length : " +cachedData.getData().length + "] " : "null")
+ ", Attributes=" + data + "]";
}
}
| 811 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/event/EVCacheEventListener.java
|
package com.netflix.evcache.event;
import java.util.EventListener;
import com.netflix.evcache.EVCacheException;
public interface EVCacheEventListener extends EventListener {
void onStart(EVCacheEvent e);
void onComplete(EVCacheEvent e);
void onError(EVCacheEvent e, Throwable t);
boolean onThrottle(EVCacheEvent e) throws EVCacheException;
}
| 812 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/event
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/event/throttle/ThrottleListener.java
|
package com.netflix.evcache.event.throttle;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.archaius.api.Property;
import com.netflix.evcache.EVCache.Call;
import com.netflix.evcache.event.EVCacheEvent;
import com.netflix.evcache.event.EVCacheEventListener;
import com.netflix.evcache.pool.EVCacheClientPoolManager;
import com.netflix.evcache.util.EVCacheConfig;
/**
 * <p>
 * To enable throttling of operations, set the below property
 * <code>EVCacheThrottler.throttle.operations=true</code>
 * </p>
 * <p>
 * To throttle any of the operations specified in {@link Call}, add the {@link Call} names (separated by comma (,)) to the below property.<br>
 * <code><EVCache appName>.throttle.calls=<comma separated list of calls></code><br>
 * <br>
 * EX: To throttle {@link Call#GET} and {@link Call#DELETE} operations for EVCACHE_CRS set the below property
 * <code>EVCACHE_CRS.throttle.calls=GET,DELETE</code>
 * </p>
 *
 * @author smadappa
 */
@Singleton
public class ThrottleListener implements EVCacheEventListener {
private static final Logger log = LoggerFactory.getLogger(ThrottleListener.class);
private final Map<String, Property<Set<String>>> _ignoreOperationsMap;
private final Property<Boolean> enableThrottleOperations;
private final EVCacheClientPoolManager poolManager;
@Inject
public ThrottleListener(EVCacheClientPoolManager poolManager) {
this.poolManager = poolManager;
this._ignoreOperationsMap = new ConcurrentHashMap<String, Property<Set<String>>>();
enableThrottleOperations = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler.throttle.operations", Boolean.class).orElse(false);
enableThrottleOperations.subscribe(i -> setupListener());
if(enableThrottleOperations.get()) setupListener();
}
private void setupListener() {
if(enableThrottleOperations.get()) {
poolManager.addEVCacheEventListener(this);
} else {
poolManager.removeEVCacheEventListener(this);
}
}
public void onStart(final EVCacheEvent e) {
}
    @Override
    public boolean onThrottle(final EVCacheEvent e) {
        if(!enableThrottleOperations.get()) return false;
        final String appName = e.getAppName();
        Property<Set<String>> throttleCalls = _ignoreOperationsMap.get(appName);
        if(throttleCalls == null) {
            // Lazily look up the comma separated <appName>.throttle.calls property documented above
            throttleCalls = EVCacheConfig.getInstance().getPropertyRepository().get(appName + ".throttle.calls", String.class).orElse("")
                    .map(s -> s.length() == 0 ? Collections.<String>emptySet() : new HashSet<String>(Arrays.asList(s.split(","))));
            _ignoreOperationsMap.put(appName, throttleCalls);
        }
        if(throttleCalls.get().size() > 0 && throttleCalls.get().contains(e.getCall().name())) {
            if(log.isDebugEnabled()) log.debug("Call : " + e.getCall() + " is throttled");
            return true;
        }
        return false;
    }
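    // Illustrative configuration (hypothetical app name EVCACHE_CRS): with
    //   EVCacheThrottler.throttle.operations=true
    //   EVCACHE_CRS.throttle.calls=GET,DELETE
    // every GET and DELETE issued against EVCACHE_CRS is rejected by onThrottle above.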
public void onComplete(EVCacheEvent e) {
}
public void onError(EVCacheEvent e, Throwable t) {
}
}
| 813 |
0 |
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/event
|
Create_ds/EVCache/evcache-core/src/main/java/com/netflix/evcache/event/hotkey/HotKeyListener.java
|
package com.netflix.evcache.event.hotkey;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.netflix.archaius.api.Property;
import com.netflix.evcache.EVCacheKey;
import com.netflix.evcache.event.EVCacheEvent;
import com.netflix.evcache.event.EVCacheEventListener;
import com.netflix.evcache.pool.EVCacheClientPoolManager;
import com.netflix.evcache.util.EVCacheConfig;
/**
 * <p>
 * To enable throttling of requests on the client for keys that are sending too many requests in a short duration, set the below property
 * <code>EVCacheThrottler.throttle.hot.keys=true</code>
 * </p>
 * <br>
 * Hot keys can be throttled in 2 ways.
 *
 * <ol>
 * <li>If there is a set of keys determined by an offline process or by enabling debugging, then we can set the following property (, separated)
 *
 * ex: <code><evcache appName>.throttle.keys=key1,key2</code>
 * This will throttle all operations for keys key1 and key2
 *
 * </li><li>Another option is to dynamically figure out, based on metrics, if a key is receiving a lot of operations.
 * At the start of every operation we add the key to an internal cache for a duration specified by <code>EVCacheThrottler.<evcache appName>.inmemory.expire.after.write.duration.ms</code> (default is 10 seconds).
 * If a key appears again within this duration we increment its count and keep it alive for <code>EVCacheThrottler.<evcache appName>.inmemory.expire.after.access.duration.ms</code> (default is 10 seconds).
 * Once the key count crosses <code>EVCacheThrottler.<evcache appName>.throttle.value</code> (default is 3) the key will be throttled. YMMV, so tune this based on your evcache app and client requests.
 * </li>
 * </ol>
 *
 * @author smadappa
 */
@Singleton
public class HotKeyListener implements EVCacheEventListener {
private static final Logger log = LoggerFactory.getLogger(HotKeyListener.class);
private final Map<String, Property<Boolean>> throttleMap;
private final Map<String, Cache<String, Integer>> cacheMap;
private final Integer START_VAL = Integer.valueOf(1);
private final Property<Boolean> enableThrottleHotKeys;
private final EVCacheClientPoolManager poolManager;
private final Map<String, Property<Set<String>>> throttleKeysMap;
@Inject
public HotKeyListener(EVCacheClientPoolManager poolManager) {
this.poolManager = poolManager;
this.throttleKeysMap = new ConcurrentHashMap<String, Property<Set<String>>>();
this.throttleMap = new ConcurrentHashMap<String, Property<Boolean>>();
cacheMap = new ConcurrentHashMap<String, Cache<String, Integer>>();
enableThrottleHotKeys = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler.throttle.hot.keys", Boolean.class).orElse(false);
enableThrottleHotKeys.subscribe((i) -> setupHotKeyListener());
if(enableThrottleHotKeys.get()) setupHotKeyListener();
}
private void setupHotKeyListener() {
if(enableThrottleHotKeys.get()) {
poolManager.addEVCacheEventListener(this);
} else {
poolManager.removeEVCacheEventListener(this);
for(Cache<String, Integer> cache : cacheMap.values()) {
cache.invalidateAll();
}
}
}
private Cache<String, Integer> getCache(String appName) {
Property<Boolean> throttleFlag = throttleMap.get(appName);
if(throttleFlag == null) {
throttleFlag = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler." + appName + ".throttle.hot.keys", Boolean.class).orElse(false);
throttleMap.put(appName, throttleFlag);
}
if(log.isDebugEnabled()) log.debug("Throttle hot keys : " + throttleFlag);
if(!throttleFlag.get()) {
return null;
}
Cache<String, Integer> cache = cacheMap.get(appName);
if(cache != null) return cache;
        final Property<Integer> _cacheDuration = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler." + appName + ".inmemory.expire.after.write.duration.ms", Integer.class).orElse(10000);
        final Property<Integer> _expireAfterAccessDuration = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler." + appName + ".inmemory.expire.after.access.duration.ms", Integer.class).orElse(10000);
        final Property<Integer> _cacheSize = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler." + appName + ".inmemory.cache.size", Integer.class).orElse(100);
        CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().recordStats();
        if(_cacheSize.get() > 0) {
            builder = builder.maximumSize(_cacheSize.get());
        }
        if(_expireAfterAccessDuration.get() > 0) {
            builder = builder.expireAfterAccess(_expireAfterAccessDuration.get(), TimeUnit.MILLISECONDS);
        } else if(_cacheDuration.get() > 0) {
            builder = builder.expireAfterWrite(_cacheDuration.get(), TimeUnit.MILLISECONDS);
        }
cache = builder.build();
cacheMap.put(appName, cache);
return cache;
}
public void onStart(final EVCacheEvent e) {
if(!enableThrottleHotKeys.get()) return;
final Cache<String, Integer> cache = getCache(e.getAppName());
if(cache == null) return;
for(EVCacheKey evcKey : e.getEVCacheKeys()) {
final String key = evcKey.getKey();
Integer val = cache.getIfPresent(key);
if(val == null) {
cache.put(key, START_VAL);
} else {
cache.put(key, Integer.valueOf(val.intValue() + 1));
}
}
}
    @Override
    public boolean onThrottle(final EVCacheEvent e) {
        if(!enableThrottleHotKeys.get()) return false;
        final String appName = e.getAppName();
        Property<Set<String>> throttleKeysSet = throttleKeysMap.get(appName);
        if(throttleKeysSet == null) {
            // Lazily look up the comma separated <appName>.throttle.keys property documented above
            throttleKeysSet = EVCacheConfig.getInstance().getPropertyRepository().get(appName + ".throttle.keys", String.class).orElse("")
                    .map(s -> s.length() == 0 ? Collections.<String>emptySet() : new HashSet<String>(Arrays.asList(s.split(","))));
            throttleKeysMap.put(appName, throttleKeysSet);
        }
        if(throttleKeysSet.get().size() > 0) {
            if(log.isDebugEnabled()) log.debug("Throttle : " + throttleKeysSet);
            for(EVCacheKey evcKey : e.getEVCacheKeys()) {
                final String key = evcKey.getKey();
                if(throttleKeysSet.get().contains(key)) {
                    if(log.isDebugEnabled()) log.debug("Key : " + key + " is throttled");
                    return true;
                }
            }
        }
        final Cache<String, Integer> cache = getCache(appName);
        if(cache == null) return false;
        final Property<Integer> _throttleVal = EVCacheConfig.getInstance().getPropertyRepository().get("EVCacheThrottler." + appName + ".throttle.value", Integer.class).orElse(3);
        for(EVCacheKey evcKey : e.getEVCacheKeys()) {
            final String key = evcKey.getKey();
            final Integer val = cache.getIfPresent(key);
            if(val != null && val.intValue() > _throttleVal.get()) {
                if(log.isDebugEnabled()) log.debug("Key : " + key + " has exceeded " + _throttleVal.get() + ". Will throttle this request");
                return true;
            }
        }
        return false;
    }
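    // Illustrative configuration (hypothetical app name EVCACHE_CRS):
    //   EVCacheThrottler.throttle.hot.keys=true
    //   EVCacheThrottler.EVCACHE_CRS.throttle.hot.keys=true
    //   EVCACHE_CRS.throttle.keys=key1,key2             (static hot key list)
    //   EVCacheThrottler.EVCACHE_CRS.throttle.value=5   (dynamic threshold)
    // key1 and key2 are always throttled; any other key whose in-memory count
    // exceeds 5 within the cache window is throttled as well.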
public void onComplete(EVCacheEvent e) {
if(!enableThrottleHotKeys.get()) return;
final String appName = e.getAppName();
final Cache<String, Integer> cache = getCache(appName);
if(cache == null) return;
for(EVCacheKey evcKey : e.getEVCacheKeys()) {
final String key = evcKey.getKey();
Integer val = cache.getIfPresent(key);
if(val != null) {
cache.put(key, Integer.valueOf(val.intValue() - 1));
}
}
}
public void onError(EVCacheEvent e, Throwable t) {
if(!enableThrottleHotKeys.get()) return;
final String appName = e.getAppName();
final Cache<String, Integer> cache = getCache(appName);
if(cache == null) return;
for(EVCacheKey evcKey : e.getEVCacheKeys()) {
final String key = evcKey.getKey();
Integer val = cache.getIfPresent(key);
if(val != null) {
cache.put(key, Integer.valueOf(val.intValue() - 1));
}
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((cacheMap == null) ? 0 : cacheMap.hashCode());
result = prime * result + ((throttleMap == null) ? 0 : throttleMap.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
HotKeyListener other = (HotKeyListener) obj;
if (cacheMap == null) {
if (other.cacheMap != null)
return false;
} else if (!cacheMap.equals(other.cacheMap))
return false;
if (throttleMap == null) {
if (other.throttleMap != null)
return false;
} else if (!throttleMap.equals(other.throttleMap))
return false;
return true;
}
}
| 814 |
0 |
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service/StartServer.java
|
package com.netflix.evcache.service;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletContextEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Injector;
import com.google.inject.servlet.ServletModule;
import com.netflix.evcache.EVCacheClientLibrary;
import com.netflix.evcache.service.resources.EVCacheRESTService;
import com.netflix.evcservice.service.StatusPage;
import com.netflix.server.base.BaseHealthCheckServlet;
import com.netflix.server.base.BaseStatusPage;
import com.netflix.server.base.NFFilter;
import com.netflix.server.base.lifecycle.BaseServerLifecycleListener;
import com.sun.jersey.api.core.ResourceConfig;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
public class StartServer extends BaseServerLifecycleListener
{
private static final Logger logger = LoggerFactory.getLogger(StartServer.class);
private static final String APP_NAME = "evcacheproxy";
private static final String CONFIG_NAME = "evcacheproxy";
/**
* Creates a new StartServer object.
*/
public StartServer() {
super(CONFIG_NAME, APP_NAME, null);
}
@Override
protected void initialize(ServletContextEvent sce) throws Exception {
Injector injector = getInjector();
injector.getInstance(EVCacheClientLibrary.class);
}
@Override
protected ServletModule getServletModule() {
return new JerseyServletModule() {
@Override
protected void configureServlets() {
logger.info("########## CONFIGURING SERVLETS ##########");
// initialize NFFilter
Map<String, String> initParams = new HashMap<String,String>();
// initParams.put(ServletContainer.JSP_TEMPLATES_BASE_PATH, "/WEB-INF/jsp");
// initParams.put(ServletContainer.FEATURE_FILTER_FORWARD_ON_404, "true");
// initParams.put("requestId.accept", "true");
// initParams.put("requestId.require", "true");
initParams.put(ResourceConfig.FEATURE_DISABLE_WADL, "true");
initParams.put(PackagesResourceConfig.PROPERTY_PACKAGES, "com.netflix.evcache.service.resources");
filter("/*").through(NFFilter.class, initParams);
filter("/healthcheck", "/status").through(NFFilter.class, initParams);
serve("/Status", "/status").with(BaseStatusPage.class);
serve("/healthcheck", "/Healthcheck").with(BaseHealthCheckServlet.class);
serve("/*").with(GuiceContainer.class, initParams);
bind(EVCacheRESTService.class).asEagerSingleton();
binder().bind(GuiceContainer.class).asEagerSingleton();
install(new EVCacheServiceModule());
}
};
}
}
| 815 |
0 |
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service/HealthCheckHandlerImpl.java
|
package com.netflix.evcache.service;
import com.google.inject.Singleton;
import com.netflix.server.base.BaseHealthCheckServlet;
/**
* Created by senugula on 03/22/15.
*/
@Singleton
public class HealthCheckHandlerImpl extends BaseHealthCheckServlet {
public int getStatus() {
return 200; // TODO
}
}
| 816 |
0 |
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service/EVCacheServiceModule.java
|
package com.netflix.evcache.service;
import com.google.inject.AbstractModule;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.config.ConfigurationManager;
import com.netflix.discovery.guice.EurekaModule;
import com.netflix.evcache.EVCacheModule;
import com.netflix.evcache.connection.ConnectionModule;
import com.netflix.evcache.service.resources.EVCacheRESTService;
import com.netflix.governator.ShutdownHookModule;
import com.netflix.spectator.nflx.SpectatorModule;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import netflix.adminresources.resources.KaryonWebAdminModule;
public class EVCacheServiceModule extends AbstractModule {
@Override
protected void configure() {
// try {
// ConfigurationManager.loadAppOverrideProperties("evcacheproxy");
// final String env = ConfigurationManager.getConfigInstance().getString("eureka.environment", "test");
// if(env != null && env.length() > 0) {
// ConfigurationManager.loadAppOverrideProperties("evcacheproxy-"+env);
// }
// } catch (Exception e) {
// e.printStackTrace();
// }
//
//
// install(new ShutdownHookModule());
// install(new EurekaModule());
// install(new SpectatorModule());
// install(new ConnectionModule());
// install(new EVCacheModule());
// install(new KaryonWebAdminModule());
// install(new JerseyServletModule() {
// protected void configureServlets() {
// serve("/*").with(GuiceContainer.class);
// binder().bind(GuiceContainer.class).asEagerSingleton();
// bind(EVCacheRESTService.class).asEagerSingleton();
// bind(HealthCheckHandlerImpl.class).asEagerSingleton();
// }
// });
}
}
| 817 |
0 |
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service
|
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service/resources/EVCacheRESTService.java
|
package com.netflix.evcache.service.resources;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.evcache.EVCache;
import com.netflix.evcache.EVCacheException;
import com.netflix.evcache.EVCacheLatch;
import com.netflix.evcache.EVCacheLatch.Policy;
import com.netflix.evcache.service.transcoder.RESTServiceTranscoder;
import net.spy.memcached.CachedData;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.InputStream;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
* Created by senugula on 3/22/16.
*/
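/*
 * REST facade over EVCache exposing set/get/delete per app and key.
 * Hypothetical usage sketch (host and port are assumptions, not defined here):
 *   curl -X POST --data-binary @value.bin "http://<host>:<port>/evcrest/v1.0/MYAPP/mykey?ttl=300"
 *   curl "http://<host>:<port>/evcrest/v1.0/MYAPP/mykey"
 *   curl -X DELETE "http://<host>:<port>/evcrest/v1.0/MYAPP/mykey"
 */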
@Singleton
@Path("/evcrest/v1.0")
public class EVCacheRESTService {
private static final Logger logger = LoggerFactory.getLogger(EVCacheRESTService.class);
private final EVCache.Builder builder;
private final Map<String, EVCache> evCacheMap;
private final RESTServiceTranscoder evcacheTranscoder = new RESTServiceTranscoder();
@Inject
public EVCacheRESTService(EVCache.Builder builder) {
this.builder = builder;
this.evCacheMap = new ConcurrentHashMap<>();
}
@POST
@Path("{appId}/{key}")
@Consumes({MediaType.APPLICATION_OCTET_STREAM})
@Produces(MediaType.TEXT_PLAIN)
public Response setOperation(final InputStream in, @PathParam("appId") String pAppId, @PathParam("key") String key,
@QueryParam("ttl") String ttl, @DefaultValue("") @QueryParam("flag") String flag) {
try {
final String appId = pAppId.toUpperCase();
final byte[] bytes = IOUtils.toByteArray(in);
return setData(appId, ttl, flag, key, bytes);
} catch (EVCacheException e) {
logger.error("EVCacheException setting key " + key + " for app " + pAppId, e);
return Response.serverError().build();
} catch (Throwable t) {
logger.error("Unexpected error setting key " + key + " for app " + pAppId, t);
return Response.serverError().build();
}
}
@PUT
@Path("{appId}/{key}")
@Consumes({MediaType.APPLICATION_OCTET_STREAM})
@Produces(MediaType.TEXT_PLAIN)
public Response putOperation(final InputStream in, @PathParam("appId") String pAppId, @PathParam("key") String key,
@QueryParam("ttl") String ttl, @DefaultValue("") @QueryParam("flag") String flag) {
try {
final String appId = pAppId.toUpperCase();
final byte[] bytes = IOUtils.toByteArray(in);
return setData(appId, ttl, flag, key, bytes);
} catch (EVCacheException e) {
logger.error("EVCacheException setting key " + key + " for app " + pAppId, e);
return Response.serverError().build();
} catch (Throwable t) {
logger.error("Unexpected error setting key " + key + " for app " + pAppId, t);
return Response.serverError().build();
}
}
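// Writes go to every server group; Policy.ALL_MINUS_1 treats the write as successful
// once all but one of the server groups have acknowledged it within the await window.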
private Response setData(String appId, String ttl, String flag, String key, byte[] bytes) throws EVCacheException, InterruptedException {
final EVCache evcache = getEVCache(appId);
if (ttl == null) {
return Response.status(400).type("text/plain").entity("Please specify ttl for the key " + key + " as query parameter \n").build();
}
final int timeToLive = Integer.parseInt(ttl);
EVCacheLatch latch = null;
if(flag != null && flag.length() > 0) {
final CachedData cd = new CachedData(Integer.parseInt(flag), bytes, Integer.MAX_VALUE);
latch = evcache.set(key, cd, timeToLive, Policy.ALL_MINUS_1);
} else {
latch = evcache.set(key, bytes, timeToLive, Policy.ALL_MINUS_1);
}
if(latch != null) {
final boolean status = latch.await(2500, TimeUnit.MILLISECONDS);
if(status) {
return Response.ok("Set Operation for Key - " + key + " was successful. \n").build();
} else {
if(latch.getCompletedCount() > 0) {
if(latch.getSuccessCount() == 0){
return Response.serverError().build();
} else if(latch.getSuccessCount() > 0 ) {
return Response.ok("Set Operation for Key - " + key + " was successful in " + latch.getSuccessCount() + " Server Groups. \n").build();
}
} else {
return Response.serverError().build();
}
}
}
return Response.serverError().build();
}
@GET
@Path("{appId}/{key}")
@Produces({MediaType.APPLICATION_OCTET_STREAM})
public Response getOperation(@PathParam("appId") String appId,
@PathParam("key") String key) {
appId = appId.toUpperCase();
if (logger.isDebugEnabled()) logger.debug("Get for application " + appId + " for Key " + key);
try {
final EVCache evCache = getEVCache(appId);
CachedData cachedData = (CachedData) evCache.get(key, evcacheTranscoder);
if (cachedData == null) {
return Response.status(404).type("text/plain").entity("Key " + key + " Not Found in cache " + appId + "\n").build();
}
byte[] bytes = cachedData.getData();
if (bytes == null) {
return Response.status(404).type("text/plain").entity("Key " + key + " Not Found in cache " + appId + "\n").build();
} else {
return Response.status(200).type("application/octet-stream").entity(bytes).build();
}
} catch (EVCacheException e) {
logger.error("EVCacheException getting key " + key + " from app " + appId, e);
return Response.serverError().build();
}
}
@DELETE
@Path("{appId}/{key}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces("text/plain")
public Response deleteOperation(@PathParam("appId") String appId, @PathParam("key") String key) {
if (logger.isDebugEnabled()) logger.debug("Get for application " + appId + " for Key " + key);
appId = appId.toUpperCase();
final EVCache evCache = getEVCache(appId);
try {
final Future<Boolean>[] futures = evCache.delete(key);
for (Future<Boolean> future : futures) {
if (logger.isDebugEnabled()) logger.debug("delete of key " + key + " returned " + future.get());
}
return Response.ok("Deleted Operation for Key - " + key + " was successful. \n").build();
} catch (Exception e) {
logger.error("Exception deleting key " + key + " from app " + appId, e);
return Response.serverError().build();
}
}
private EVCache getEVCache(String appId) {
// computeIfAbsent keeps this cache-of-caches safe under concurrent requests
return evCacheMap.computeIfAbsent(appId, id -> builder.setAppName(id).build());
}
}
| 818 |
0 |
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service
|
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcache/service/transcoder/RESTServiceTranscoder.java
|
package com.netflix.evcache.service.transcoder;
import net.spy.memcached.CachedData;
import net.spy.memcached.transcoders.SerializingTranscoder;
/**
* Created by senugula on 6/23/16.
*/
public class RESTServiceTranscoder extends SerializingTranscoder {
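// Flag bit matching SerializingTranscoder's compression marker; decode() inflates
// payloads carrying this bit and passes everything else through untouched.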
static final int COMPRESSED = 2;
public RESTServiceTranscoder() {
}
public boolean asyncDecode(CachedData d) {
return false;
}
public CachedData decode(CachedData d) {
if ((d.getFlags() & COMPRESSED) != 0) {
d = new CachedData(d.getFlags(), super.decompress(d.getData()), CachedData.MAX_SIZE);
}
return d;
}
public CachedData encode(CachedData o) {
return o;
}
public int getMaxSize() {
return CachedData.MAX_SIZE;
}
}
| 819 |
0 |
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcservice
|
Create_ds/EVCache/evcacheproxy/src/main/java/com/netflix/evcservice/service/StatusPage.java
|
package com.netflix.evcservice.service;
import com.google.inject.Singleton;
import java.io.PrintWriter;
import com.netflix.server.base.BaseStatusPage;
/**
* Created by senugula on 03/22/15.
*/
@Singleton
public class StatusPage extends BaseStatusPage {
private static final long serialVersionUID = 1L;
@Override
protected void getDetails(PrintWriter out, boolean htmlize) {
super.getDetails(out, htmlize);
// Add any extra status info here
}
}
| 820 |
0 |
Create_ds/EVCache/evcache-client/test/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/test/com/netflix/evcache/test/DIBase.java
|
package com.netflix.evcache.test;
import com.google.inject.Injector;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.archaius.config.MapConfig;
import com.netflix.archaius.guice.ArchaiusModule;
import com.netflix.discovery.guice.EurekaClientModule;
import com.netflix.evcache.EVCache;
import com.netflix.evcache.EVCacheLatch;
import com.netflix.evcache.EVCacheModule;
import com.netflix.evcache.EVCacheLatch.Policy;
import com.netflix.evcache.connection.DIConnectionModule;
import com.netflix.evcache.operation.EVCacheLatchImpl;
import com.netflix.evcache.pool.EVCacheClient;
import com.netflix.evcache.pool.EVCacheClientPoolManager;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.governator.guice.LifecycleInjectorBuilder;
import com.netflix.governator.lifecycle.LifecycleManager;
import com.netflix.spectator.nflx.SpectatorModule;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
import rx.Scheduler;
@SuppressWarnings("unused")
public abstract class DIBase {
private static final Logger log = LoggerFactory.getLogger(DIBase.class);
protected EVCache evCache = null;
protected Injector injector = null;
protected LifecycleManager lifecycleManager = null;
protected EVCacheClientPoolManager manager = null;
protected Properties getProps() {
String hostname = System.getenv("EC2_HOSTNAME");
Properties props = new Properties();
if(hostname == null) {
props.setProperty("eureka.datacenter", "datacenter");//change to ndc while running on desktop
props.setProperty("eureka.validateInstanceId","false");
props.setProperty("eureka.mt.connect_timeout","1");
props.setProperty("eureka.mt.read_timeout","1");
} else {
props.setProperty("eureka.datacenter", "cloud");
props.setProperty("eureka.validateInstanceId","true");
}
System.setProperty("@region", "us-east-1");
System.setProperty("@environment", "test");
System.setProperty("eureka.region", "us-east-1");
System.setProperty("eureka.environment", "test");
props.setProperty("eureka.environment", "test");
props.setProperty("eureka.region", "us-east-1");
props.setProperty("eureka.appid", "clatency");
props.setProperty("eureka.serviceUrl.default","http://${eureka.region}.discovery${eureka.environment}.netflix.net:7001/discovery/v2/");
props.setProperty("log4j.rootLogger", "DEBUG");
System.setProperty("log4j.rootLogger", "DEBUG");
props.setProperty("log4j.logger.com.netflix.evcache.test.DIBase", "DEBUG");
props.setProperty("log4j.logger.com.netflix.evcache.test.EVCacheTestDI", "DEBUG");
props.setProperty("log4j.logger.com.netflix.evcache.pool.EVCacheNodeLocator", "ERROR");
props.setProperty("log4j.logger.com.netflix.evcache.pool.EVCacheClientUtil", "DEBUG");
return props;
}
public void setupTest(Properties props) {
}
@BeforeSuite
public void setupEnv() {
Properties props = getProps();
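// Bootstrap a Governator lifecycle with the Eureka, EVCache, connection,
// Spectator and Archaius modules; the test properties override application config.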
try {
LifecycleInjectorBuilder builder = LifecycleInjector.builder();
builder.withModules(
new EurekaClientModule(),
new EVCacheModule(),
new DIConnectionModule(),
new SpectatorModule(),
new ArchaiusModule() {
protected void configureArchaius() {
bindApplicationConfigurationOverride().toInstance(MapConfig.from(props));
};
}
);
injector = builder.build().createInjector();
lifecycleManager = injector.getInstance(LifecycleManager.class);
lifecycleManager.start();
injector.getInstance(ApplicationInfoManager.class);
final EVCacheModule lib = injector.getInstance(EVCacheModule.class);
manager = injector.getInstance(EVCacheClientPoolManager.class);
} catch (Throwable e) {
e.printStackTrace();
log.error(e.getMessage(), e);
}
}
@AfterSuite
public void shutdownEnv() {
lifecycleManager.close();
}
protected EVCache.Builder getNewBuilder() {
final EVCache.Builder evCacheBuilder = injector.getInstance(EVCache.Builder.class);
if(log.isDebugEnabled()) log.debug("evCacheBuilder : " + evCacheBuilder);
return evCacheBuilder;
}
protected boolean append(int i, EVCache gCache) throws Exception {
String val = ";APP_" + i;
String key = "key_" + i;
Future<Boolean>[] status = gCache.append(key, val, 60 * 60);
for (Future<Boolean> s : status) {
if (log.isDebugEnabled()) log.debug("APPEND : key : " + key + "; success = " + s.get() + "; Future = " + s.toString());
if (s.get() == Boolean.FALSE) return false;
}
return true;
}
protected boolean appendOrAdd(int i, EVCache gCache) throws Exception {
return appendOrAdd(i, gCache, 60 * 60);
}
protected boolean appendOrAdd(int i, EVCache gCache, int ttl) throws Exception {
String val = "val_aa_" + i;
String key = "key_" + i;
EVCacheLatch latch = gCache.appendOrAdd(key, val, null, ttl, Policy.ALL_MINUS_1);
if(log.isDebugEnabled()) log.debug("AppendOrAdd : key : " + key + "; Latch = " + latch);
boolean status = latch.await(2000, TimeUnit.MILLISECONDS);
if(log.isDebugEnabled()) log.debug("AppendOrAdd : key : " + key + "; success = " + status);
return true;
}
public boolean add(int i, EVCache gCache) throws Exception {
//String val = "This is a very long value that should work well since we are going to use compression on it. blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah val_"+i;
String val = "val_add_"+i;
String key = "key_" + i;
boolean status = gCache.add(key, val, null, 60 * 60);
if(log.isDebugEnabled()) log.debug("ADD : key : " + key + "; success = " + status);
return status;
}
public boolean insert(int i, EVCache gCache) throws Exception {
//String val = "This is a very long value that should work well since we are going to use compression on it. blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah val_"+i;
String val = "val_"+i;
String key = "key_" + i;
Future<Boolean>[] status = gCache.set(key, val, 60 * 60);
for(Future<Boolean> s : status) {
if(log.isDebugEnabled()) log.debug("SET : key : " + key + "; success = " + s.get() + "; Future = " + s.toString());
if(s.get() == Boolean.FALSE) return false;
}
return true;
}
protected boolean replace(int i, EVCache gCache) throws Exception {
return replace(i, gCache, 60 * 60);
}
protected boolean replace(int i, EVCache gCache, int ttl) throws Exception {
String val = "val_replaced_" + i;
String key = "key_" + i;
EVCacheLatch status = gCache.replace(key, val, null, ttl, Policy.ALL);
boolean opStatus = status.await(1000, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) log.debug("REPLACE : key : " + key + "; success = " + opStatus + "; EVCacheLatch = " + status);
return status.getSuccessCount() > 0;
}
public boolean delete(int i, EVCache gCache) throws Exception {
String key = "key_" + i;
Future<Boolean>[] status = gCache.delete(key);
for(Future<Boolean> s : status) {
if(log.isDebugEnabled()) log.debug("DELETE : key : " + key + "; success = " + s.get() + "; Future = " + s.toString());
if(s.get() == Boolean.FALSE) return false;
}
return true;
}
protected boolean touch(int i, EVCache gCache) throws Exception {
return touch(i, gCache, 60 * 60);
}
protected boolean touch(int i, EVCache gCache, int ttl) throws Exception {
String key = "key_" + i;
Future<Boolean>[] status = gCache.touch(key, ttl);
for (Future<Boolean> s : status) {
if (log.isDebugEnabled()) log.debug("TOUCH : key : " + key + "; success = " + s.get() + "; Future = " + s.toString());
if (s.get() == Boolean.FALSE) return false;
}
return true;
}
@SuppressWarnings("deprecation")
protected boolean insertUsingLatch(int i, String app) throws Exception {
String val = "val_" + i;
String key = "key_" + i;
long start = System.currentTimeMillis();
final EVCacheClient[] clients = manager.getEVCacheClientPool(app).getEVCacheClientForWrite();
final EVCacheLatch latch = new EVCacheLatchImpl(EVCacheLatch.Policy.ALL, clients.length, app);
for (EVCacheClient client : clients) {
client.set(key, val, 60 * 60, latch);
}
boolean success = latch.await(1000, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) log.debug("SET LATCH : key : " + key + "; Finished in " + (System.currentTimeMillis() - start) + " msec");
return success;
}
protected boolean deleteLatch(int i, String appName) throws Exception {
long start = System.currentTimeMillis();
String key = "key_" + i;
final EVCacheClient[] clients = manager.getEVCacheClientPool(appName).getEVCacheClientForWrite();
final EVCacheLatch latch = new EVCacheLatchImpl(Policy.ALL, clients.length, appName);
for (EVCacheClient client : clients) {
client.delete(key, latch);
}
latch.await(1000, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) log.debug("DELETE LATCH : key : " + key + "; Finished in " + (System.currentTimeMillis() - start) + " msec" + "; Latch : " + latch);
return true;
}
public String get(int i, EVCache gCache) throws Exception {
String key = "key_" + i;
String value = gCache.<String>get(key);
if(log.isDebugEnabled()) log.debug("get : key : " + key + " val = " + value);
return value;
}
public String getAndTouch(int i, EVCache gCache) throws Exception {
String key = "key_" + i;
String value = gCache.<String>getAndTouch(key, 60 * 60);
if(log.isDebugEnabled()) log.debug("getAndTouch : key : " + key + " val = " + value);
return value;
}
public Map<String, String> getBulk(String keys[], EVCache gCache) throws Exception {
final Map<String, String> value = gCache.<String>getBulk(keys);
if(log.isDebugEnabled()) log.debug("getBulk : keys : " + Arrays.toString(keys) + "; values = " + value);
return value;
}
public Map<String, String> getBulkAndTouch(String keys[], EVCache gCache, int ttl) throws Exception {
final Map<String, String> value = gCache.<String>getBulkAndTouch(Arrays.asList(keys), null, ttl);
if(log.isDebugEnabled()) log.debug("getBulk : keys : " + Arrays.toString(keys) + "; values = " + value);
return value;
}
public String getObservable(int i, EVCache gCache, Scheduler scheduler) throws Exception {
String key = "key_" + i;
String value = gCache.<String>get(key, scheduler).toBlocking().value();
if(log.isDebugEnabled()) log.debug("get : key : " + key + " val = " + value);
return value;
}
public String getAndTouchObservable(int i, EVCache gCache, Scheduler scheduler) throws Exception {
String key = "key_" + i;
String value = gCache.<String>getAndTouch(key, 60 * 60, scheduler).toBlocking().value();
if(log.isDebugEnabled()) log.debug("getAndTouch : key : " + key + " val = " + value);
return value;
}
class RemoteCaller implements Runnable {
EVCache gCache;
public RemoteCaller(EVCache c) {
this.gCache = c;
}
public void run() {
try {
for(int i = 0; i < 100; i++) {
insert(i, gCache);
get(i, gCache);
delete(i, gCache);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
}
| 821 |
0 |
Create_ds/EVCache/evcache-client/test/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/test/com/netflix/evcache/test/EVCacheTestDI.java
|
package com.netflix.evcache.test;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import java.util.Map;
import java.util.Properties;
import java.util.*;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import com.netflix.evcache.*;
import com.netflix.evcache.pool.EVCacheClient;
import com.netflix.evcache.pool.ServerGroup;
import com.netflix.evcache.util.KeyHasher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;
import com.netflix.evcache.operation.EVCacheOperationFuture;
import rx.schedulers.Schedulers;
import static org.testng.Assert.*;
public class EVCacheTestDI extends DIBase implements EVCacheGetOperationListener<String> {
private static final Logger log = LoggerFactory.getLogger(EVCacheTestDI.class);
private int loops = 1;
private Map<String, String> propertiesToSet;
private String appName = "EVCACHE_TEST";
public static void main(String args[]) {
try {
EVCacheTestDI test = new EVCacheTestDI();
test.testAll();
} catch(Throwable t) {
log.error(t.getMessage(), t);
}
}
public EVCacheTestDI() {
propertiesToSet = new HashMap<>();
propertiesToSet.putIfAbsent(appName + ".us-east-1d.EVCacheClientPool.writeOnly", "false");
propertiesToSet.putIfAbsent(appName + ".EVCacheClientPool.poolSize", "1");
propertiesToSet.putIfAbsent(appName + ".ping.servers", "false");
propertiesToSet.putIfAbsent(appName + ".cid.throw.exception", "true");
propertiesToSet.putIfAbsent(appName + ".EVCacheClientPool.readTimeout", "500");
propertiesToSet.putIfAbsent(appName + ".EVCacheClientPool.bulkReadTimeout", "500");
propertiesToSet.putIfAbsent(appName + ".max.read.queue.length", "20");
propertiesToSet.putIfAbsent("EVCacheClientPoolManager.log.apps", appName);
propertiesToSet.putIfAbsent(appName + ".fallback.zone", "true");
propertiesToSet.putIfAbsent(appName + ".enable.throttling", "false");
propertiesToSet.putIfAbsent(appName + ".throttle.time", "0");
propertiesToSet.putIfAbsent(appName + ".throttle.percent", "0");
propertiesToSet.putIfAbsent(appName + ".log.operation", "1000");
propertiesToSet.putIfAbsent(appName + ".EVCacheClientPool.validate.input.queue", "true");
propertiesToSet.putIfAbsent("evcache.use.binary.protocol", "false");
}
protected Properties getProps() {
Properties props = super.getProps();
propertiesToSet.entrySet().forEach(entry -> props.setProperty(entry.getKey(), entry.getValue()));
return props;
}
@Test
public void testEVCache() {
this.evCache = getNewBuilder().setAppName(appName).setCachePrefix("cid").enableRetry().build();
assertNotNull(evCache);
}
@Test(dependsOnMethods = { "testEVCache" })
public void testKeySizeCheck() throws Exception {
final String key = "This is an invalid key";
boolean exceptionThrown = false;
for (int i = 0; i < loops; i++) {
try {
if (log.isDebugEnabled()) log.debug("Check key : " + key );
evCache.<String>get(key);
} catch(Exception e) {
exceptionThrown = true;
if (log.isDebugEnabled()) log.debug("Check key : " + key + ": INVALID");
}
assertTrue(exceptionThrown);
}
final String longKey = "This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.This_is_an_a_very_long_key.";
exceptionThrown = false;
for (int i = 0; i < loops; i++) {
try {
if (log.isDebugEnabled()) log.debug("Check key length : " + longKey );
evCache.<String>get(longKey);
} catch(Exception e) {
exceptionThrown = true;
if (log.isDebugEnabled()) log.debug("Check key length: " + longKey + ": INVALID");
}
assertTrue(exceptionThrown);
}
}
@Test(dependsOnMethods = { "testKeySizeCheck" })
public void testTouch() throws Exception {
for (int i = 0; i < loops; i++) {
touch(i, evCache);
}
}
@Test(dependsOnMethods = { "testTouch" })
public void testDelete() throws Exception {
for (int i = 0; i < loops; i++) {
delete(i, evCache);
}
}
@Test(dependsOnMethods = { "testDelete" })
public void testAdd() throws Exception {
for (int i = 0; i < loops; i++) {
add(i, evCache);
}
}
@Test(dependsOnMethods = { "testAdd" })
public void testInsertBinary() throws Exception {
for (int i = 0; i < loops; i++) {
assertTrue(insertBytes(i, evCache));
}
}
private boolean insertBytes(int i, EVCache gCache) throws Exception {
byte[] val = ("val_" + i).getBytes();
String key = "key_b_" + i;
Future<Boolean>[] status = gCache.set(key, val, 24 * 60 * 60);
for (Future<Boolean> s : status) {
if (log.isDebugEnabled()) log.debug("SET BYTES : key : " + key + "; success = " + s.get() + "; Future = " + s.toString());
if (s.get() == Boolean.FALSE) return false;
}
return true;
}
@Test(dependsOnMethods = { "testInsertBinary" })
public void testGetBytes() throws Exception {
for (int i = 0; i < loops; i++) {
String key = "key_b_" + i;
byte[] value = evCache.<byte[]> get(key);
if(value != null) {
if (log.isDebugEnabled()) log.debug("get : key : " + key + " val length = " + value.length);
}
assertNotNull(value);
}
}
@Test(dependsOnMethods = { "testGetBytes" })
public void testInsert() throws Exception {
for (int i = 0; i < loops; i++) {
assertTrue(insert(i, evCache));
}
}
@Test(dependsOnMethods = { "testInsert" })
public void testGet() throws Exception {
for (int i = 0; i < loops; i++) {
final String val = get(i, evCache);
assertNotNull(val);
assertTrue(val.equals("val_" + i));
}
}
@Test(dependsOnMethods = { "testGet" })
public void testGetAndTouch() throws Exception {
for (int i = 0; i < loops; i++) {
final String val = getAndTouch(i, evCache);
assertNotNull(val);
assertTrue(val.equals("val_" + i));
}
}
@Test(dependsOnMethods = { "testGetAndTouch" })
public void testBulk() throws Exception {
final String[] keys = new String[loops];
for (int i = 0; i < loops; i++) {
keys[i] = "key_" + i;
}
Map<String, String> vals = getBulk(keys, evCache);
assertNotNull(vals);
for (int i = 0; i < keys.length; i++) {
String key = keys[i];
String val = vals.get(key);
if (val == null) {
if (log.isDebugEnabled()) log.debug("key " + key + " returned null");
} else {
assertTrue(val.equals("val_" + i));
}
}
}
@Test(dependsOnMethods = { "testBulk" })
public void testBulkAndTouch() throws Exception {
final String[] keys = new String[loops];
for (int i = 0; i < loops; i++) {
keys[i] = "key_" + i;
}
Map<String, String> vals = getBulkAndTouch(keys, evCache, 24 * 60 * 60);
assertNotNull(vals);
for (int i = 0; i < keys.length; i++) {
String key = "key_" + i;
String val = vals.get(key);
if (val == null) {
if (log.isDebugEnabled()) log.debug("key " + key + " returned null");
} else {
assertTrue(val.equals("val_" + i));
}
}
}
@Test(dependsOnMethods = { "testInsert" })
public void testGetObservable() throws Exception {
for (int i = 0; i < loops; i++) {
final String val = getObservable(i, evCache, Schedulers.computation());
assertNotNull(val);
assertTrue(val.equals("val_" + i));
}
}
@Test(dependsOnMethods = { "testGetObservable" })
public void testGetAndTouchObservable() throws Exception {
for (int i = 0; i < loops; i++) {
final String val = getAndTouchObservable(i, evCache, Schedulers.computation());
assertNotNull(val);
assertTrue(val.equals("val_" + i));
}
}
@Test(dependsOnMethods = { "testGetAndTouchObservable" })
public void waitForCallbacks() throws Exception {
Thread.sleep(1000);
}
@Test(dependsOnMethods = { "waitForCallbacks" })
public void testReplace() throws Exception {
for (int i = 0; i < 10; i++) {
replace(i, evCache);
}
}
@Test(dependsOnMethods = { "testReplace" })
public void testAppendOrAdd() throws Exception {
for (int i = 0; i < loops; i++) {
assertTrue(appendOrAdd(i, evCache));
}
}
private void refreshEVCache() {
setupEnv();
testEVCache();
}
@Test(dependsOnMethods = {"testAppendOrAdd"})
public void functionalTestsWithAppLevelAndASGLevelHashingScenarios() throws Exception {
refreshEVCache();
// no hashing
assertFalse(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".hash.key", Boolean.class).orElse(false).get());
doFunctionalTests(false);
// hashing at app level
propertiesToSet.put(appName + ".hash.key", "true");
refreshEVCache();
assertTrue(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".hash.key", Boolean.class).orElse(false).get());
doFunctionalTests(true);
propertiesToSet.remove(appName + ".hash.key");
// hashing at app level due to auto hashing as a consequence of a large key
propertiesToSet.put(appName + ".auto.hash.keys", "true");
refreshEVCache();
assertTrue(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".auto.hash.keys", Boolean.class).orElse(false).get());
assertFalse(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".hash.key", Boolean.class).orElse(false).get());
testWithLargeKey();
// negative scenario
propertiesToSet.remove(appName + ".auto.hash.keys");
refreshEVCache();
assertFalse(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".auto.hash.keys", Boolean.class).orElse(false).get());
assertFalse(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".hash.key", Boolean.class).orElse(false).get());
assertThrows(IllegalArgumentException.class, () -> {
testWithLargeKey();
});
// hashing at app level by choice AND different hashing at each asg
Map<String, KeyHasher.HashingAlgorithm> hashingAlgorithmsByServerGroup = new HashMap<>();
propertiesToSet.put(appName + ".hash.key", "true");
refreshEVCache();
assertTrue(manager.getEVCacheConfig().getPropertyRepository().get(appName + ".hash.key", Boolean.class).orElse(false).get());
// get server group names, to be used to configure the ASG level hashing properties
Map<ServerGroup, List<EVCacheClient>> clientsByServerGroup = manager.getEVCacheClientPool(appName).getAllInstancesByServerGroup();
int i = 0;
KeyHasher.HashingAlgorithm hashingAlgorithm = KeyHasher.HashingAlgorithm.values()[0];
for (ServerGroup serverGroup : clientsByServerGroup.keySet()) {
// use below logic to have different hashing per asg once the code supports. Currently the code caches the value that it uses for all the asgs
// KeyHasher.HashingAlgorithm hashingAlgorithm = KeyHasher.HashingAlgorithm.values()[i++ % KeyHasher.HashingAlgorithm.values().length];
hashingAlgorithmsByServerGroup.put(serverGroup.getName(), hashingAlgorithm);
propertiesToSet.put(serverGroup.getName() + ".hash.key", "true");
propertiesToSet.put(serverGroup.getName() + ".hash.algo", hashingAlgorithm.name());
}
refreshEVCache();
clientsByServerGroup = manager.getEVCacheClientPool(appName).getAllInstancesByServerGroup();
// validate hashing properties of asgs
for (ServerGroup serverGroup : clientsByServerGroup.keySet()) {
assertEquals(clientsByServerGroup.get(serverGroup).get(0).getHashingAlgorithm(), hashingAlgorithmsByServerGroup.get(serverGroup.getName()));
}
doFunctionalTests(true);
for (ServerGroup serverGroup : clientsByServerGroup.keySet()) {
propertiesToSet.remove(serverGroup.getName());
}
}
private void testWithLargeKey() throws Exception {
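// Build a key far beyond memcached's 250-character limit so the client must
// either hash it (auto.hash.keys=true) or reject it with IllegalArgumentException.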
StringBuilder sb = new StringBuilder();
for (int i= 0; i < 100; i++) {
sb.append(Long.toString(System.currentTimeMillis()));
}
String key = sb.toString();
String value = UUID.randomUUID().toString();
// set
EVCacheLatch latch = evCache.set(key, value, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
// get
assertEquals(evCache.get(key), value);
}
private void doFunctionalTests(boolean isHashingEnabled) throws Exception {
String key1 = Long.toString(System.currentTimeMillis());
String value1 = UUID.randomUUID().toString();
// set
EVCacheLatch latch = evCache.set(key1, value1, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
// get
assertEquals(evCache.get(key1), value1);
// replace
value1 = UUID.randomUUID().toString();
latch = evCache.replace(key1, value1, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
// get
assertEquals(evCache.get(key1), value1);
// add a key
String key2 = Long.toString(System.currentTimeMillis());
String value2 = UUID.randomUUID().toString();
latch = evCache.add(key2, value2, null, 1000, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
// get
assertEquals(evCache.get(key2), value2);
// appendoradd - append case
String value3 = UUID.randomUUID().toString();
if (isHashingEnabled) {
assertThrows(EVCacheException.class, () -> {
evCache.appendOrAdd(key2, value3, null, 1000, EVCacheLatch.Policy.ALL);
});
} else {
latch = evCache.appendOrAdd(key2, value3, null, 1000, EVCacheLatch.Policy.ALL);
latch.await(3000, TimeUnit.MILLISECONDS);
assertEquals(evCache.get(key2), value2 + value3);
}
// appendoradd - add case
String key3 = Long.toString(System.currentTimeMillis());
String value4 = UUID.randomUUID().toString();
if (isHashingEnabled) {
assertThrows(EVCacheException.class, () -> {
evCache.appendOrAdd(key3, value4, null, 1000, EVCacheLatch.Policy.ALL);
});
} else {
latch = evCache.appendOrAdd(key3, value4, null, 1000, EVCacheLatch.Policy.ALL);
latch.await(3000, TimeUnit.MILLISECONDS);
// get
assertEquals(evCache.get(key3), value4);
}
// append
String value5 = UUID.randomUUID().toString();
if (isHashingEnabled) {
assertThrows(EVCacheException.class, () -> {
evCache.append(key3, value5, 1000);
});
} else {
final Future<Boolean>[] futures = evCache.append(key3, value5, 1000);
for (Future<Boolean> future : futures) {
assertTrue(future.get());
}
// get
assertEquals(evCache.get(key3), value4 + value5);
}
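// incr/decr: the first call on a missing key seeds the counter with the default
// value (10); later calls apply the delta, and decr floors at 0.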
String key4 = Long.toString(System.currentTimeMillis());
assertEquals(evCache.incr(key4, 1, 10, 1000), 10);
assertEquals(evCache.incr(key4, 10, 10, 1000), 20);
// decr
String key5 = Long.toString(System.currentTimeMillis());
assertEquals(evCache.decr(key5, 1, 10, 1000), 10);
assertEquals(evCache.decr(key5, 20, 10, 1000), 0);
// delete
latch = evCache.delete(key1, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
latch = evCache.delete(key2, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
latch = evCache.delete(key3, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
latch = evCache.delete(key4, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
latch = evCache.delete(key5, EVCacheLatch.Policy.ALL);
latch.await(1000, TimeUnit.MILLISECONDS);
// test expiry
String key6 = Long.toString(System.currentTimeMillis());
assertEquals(evCache.incr(key6, 1, 10, 5), 10);
Thread.sleep(5000);
assertNull(evCache.get(key6));
assertNull(evCache.get(key1));
assertNull(evCache.get(key2));
assertNull(evCache.get(key3));
assertNull(evCache.get(key4));
assertNull(evCache.get(key5));
}
public void testAll() {
try {
setupEnv();
testEVCache();
testDelete();
testAdd();
Thread.sleep(500);
// testInsertBinary();
testInsert();
int i = 0;
while (i++ < loops*1000) {
try {
testInsert();
testGet();
testGetAndTouch();
testBulk();
testBulkAndTouch();
testGetObservable();
testGetAndTouchObservable();
waitForCallbacks();
testAppendOrAdd();
testTouch();
testDelete();
testInsert();
if(i % 2 == 0) testDelete();
testAdd();
Thread.sleep(100);
} catch (Throwable e) {
log.error(e.getMessage(), e);
}
}
if (log.isDebugEnabled()) log.debug("All Done!!!. Will exit.");
System.exit(0);
} catch (Exception e) {
e.printStackTrace();
log.error(e.getMessage(), e);
}
}
public void onComplete(EVCacheOperationFuture<String> future) throws Exception {
if (log.isDebugEnabled()) log.debug("getl : key : " + future.getKey() + ", val = " + future.get());
}
}
| 822 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/EVCacheModule.java
|
package com.netflix.evcache;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import com.google.inject.*;
import com.netflix.archaius.api.annotations.ConfigurationSource;
import com.netflix.evcache.connection.DIConnectionModule;
import com.netflix.evcache.connection.IConnectionBuilder;
import com.netflix.evcache.event.hotkey.HotKeyListener;
import com.netflix.evcache.event.throttle.ThrottleListener;
import com.netflix.evcache.pool.EVCacheClientPoolManager;
import com.netflix.evcache.pool.EVCacheNodeList;
import com.netflix.evcache.util.EVCacheConfig;
import com.netflix.evcache.pool.eureka.DIEVCacheNodeListProvider;
import com.netflix.evcache.version.VersionTracker;
@Singleton
@SuppressWarnings("deprecation")
public class EVCacheModule extends AbstractModule {
public EVCacheModule() {
}
@Singleton
@ConfigurationSource("evcache")
public static class EVCacheModuleConfigLoader {
@Inject
public EVCacheModuleConfigLoader(Injector injector, EVCacheModule module) {
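// Install the default connection module only if the host application has not
// already bound its own IConnectionBuilder implementation.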
if(injector.getExistingBinding(Key.get(IConnectionBuilder.class)) == null) {
module.install(new DIConnectionModule());
}
}
}
@Override
protected void configure() {
// Make sure connection factory provider Module is initialized in your Module when you init EVCacheModule
bind(EVCacheModuleConfigLoader.class).asEagerSingleton();
bind(EVCacheNodeList.class).toProvider(DIEVCacheNodeListProvider.class);
bind(EVCacheClientPoolManager.class).asEagerSingleton();
bind(HotKeyListener.class).asEagerSingleton();
bind(ThrottleListener.class).asEagerSingleton();
bind(VersionTracker.class).asEagerSingleton();
requestStaticInjection(EVCacheModuleConfigLoader.class);
requestStaticInjection(EVCacheConfig.class);
}
@Inject
EVCacheClientPoolManager manager;
@PostConstruct
public void init() {
if(manager != null) {
manager.initAtStartup();
} else {
EVCacheClientPoolManager.getInstance().initAtStartup();
}
}
@PreDestroy
public void shutdown() {
if(manager != null) {
manager.shutdown();
} else {
EVCacheClientPoolManager.getInstance().shutdown();
}
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public boolean equals(Object obj) {
return (obj != null) && (obj.getClass() == getClass());
}
}
| 823 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/connection/DIConnectionFactoryBuilderProvider.java
|
package com.netflix.evcache.connection;
import com.netflix.archaius.api.Property;
import com.netflix.archaius.api.PropertyRepository;
import com.netflix.discovery.EurekaClient;
import com.netflix.evcache.pool.EVCacheClient;
import com.netflix.evcache.util.EVCacheConfig;
import net.spy.memcached.ConnectionFactory;
import javax.inject.Inject;
import javax.inject.Provider;
public class DIConnectionFactoryBuilderProvider extends ConnectionFactoryBuilder implements Provider<IConnectionBuilder> {
private final EurekaClient eurekaClient;
private final PropertyRepository props;
@Inject
public DIConnectionFactoryBuilderProvider(EurekaClient eurekaClient, PropertyRepository props) {
this.eurekaClient = eurekaClient;
this.props = props;
}
@Override
public ConnectionFactoryBuilder get() {
return this;
}
public int getMaxQueueLength(String appName) {
return props.get(appName + ".max.queue.length", Integer.class).orElse(16384).get();
}
public int getOPQueueMaxBlockTime(String appName) {
return props.get(appName + ".operation.QueueMaxBlockTime", Integer.class).orElse(10).get();
}
public Property<Integer> getOperationTimeout(String appName) {
return props.get(appName + ".operation.timeout", Integer.class).orElse(2500);
}
public boolean useBinaryProtocol() {
return EVCacheConfig.getInstance().getPropertyRepository().get("evcache.use.binary.protocol", Boolean.class).orElse(true).get();
}
public EurekaClient getEurekaClient() {
return eurekaClient;
}
public PropertyRepository getProps() {
return props;
}
@Override
public ConnectionFactory getConnectionFactory(EVCacheClient client) {
final String appName = client.getAppName();
if(useBinaryProtocol())
return new DIConnectionFactory(client, eurekaClient, getMaxQueueLength(appName), getOperationTimeout(appName), getOPQueueMaxBlockTime(appName));
else return new DIAsciiConnectionFactory(client, eurekaClient, getMaxQueueLength(appName), getOperationTimeout(appName), getOPQueueMaxBlockTime(appName));
}
}
| 824 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/connection/DIConnectionModule.java
|
package com.netflix.evcache.connection;
import com.google.inject.AbstractModule;
import com.google.inject.Singleton;
@Singleton
public class DIConnectionModule extends AbstractModule {
public DIConnectionModule() {
}
@Override
protected void configure() {
bind(IConnectionBuilder.class).toProvider(DIConnectionFactoryBuilderProvider.class);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public boolean equals(Object obj) {
return (obj != null) && (obj.getClass() == getClass());
}
}
| 825 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/connection/DIAsciiConnectionFactory.java
|
package com.netflix.evcache.connection;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.archaius.api.Property;
import com.netflix.discovery.EurekaClient;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import com.netflix.evcache.pool.DIEVCacheKetamaNodeLocatorConfiguration;
import com.netflix.evcache.pool.EVCacheClient;
import com.netflix.evcache.pool.EVCacheNodeLocator;
import net.spy.memcached.DefaultHashAlgorithm;
import net.spy.memcached.MemcachedNode;
import net.spy.memcached.NodeLocator;
public class DIAsciiConnectionFactory extends BaseAsciiConnectionFactory {
private static Logger log = LoggerFactory.getLogger(DIAsciiConnectionFactory.class);
private final EurekaClient eurekaClient;
DIAsciiConnectionFactory(EVCacheClient client, EurekaClient eurekaClient, int len, Property<Integer> operationTimeout, long opMaxBlockTime) {
super(client, len, operationTimeout, opMaxBlockTime);
client.addTag(EVCacheMetricsFactory.CONNECTION, "ASCII");
this.eurekaClient = eurekaClient;
if(log.isInfoEnabled()) log.info("Using ASCII Connection Factory!!!");
}
@Override
public NodeLocator createLocator(List<MemcachedNode> list) {
this.locator = new EVCacheNodeLocator(client, list, DefaultHashAlgorithm.KETAMA_HASH, new DIEVCacheKetamaNodeLocatorConfiguration(client, eurekaClient));
return locator;
}
}
| 826 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/connection/DIConnectionFactory.java
|
package com.netflix.evcache.connection;
import com.netflix.archaius.api.Property;
import com.netflix.discovery.EurekaClient;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import com.netflix.evcache.pool.DIEVCacheKetamaNodeLocatorConfiguration;
import com.netflix.evcache.pool.EVCacheClient;
import com.netflix.evcache.pool.EVCacheNodeLocator;
import net.spy.memcached.DefaultHashAlgorithm;
import net.spy.memcached.MemcachedNode;
import net.spy.memcached.NodeLocator;
import java.util.List;
public class DIConnectionFactory extends BaseConnectionFactory {
private final EurekaClient eurekaClient;
DIConnectionFactory(EVCacheClient client, EurekaClient eurekaClient, int len, Property<Integer> operationTimeout, long opMaxBlockTime) {
super(client, len, operationTimeout, opMaxBlockTime);
client.addTag(EVCacheMetricsFactory.CONNECTION, "BINARY");
this.eurekaClient = eurekaClient;
}
@Override
public NodeLocator createLocator(List<MemcachedNode> list) {
this.locator = new EVCacheNodeLocator(client, list, DefaultHashAlgorithm.KETAMA_HASH, new DIEVCacheKetamaNodeLocatorConfiguration(client, eurekaClient));
return locator;
}
}
| 827 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/version/VersionTracker.java
|
package com.netflix.evcache.version;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import com.netflix.evcache.pool.EVCacheClientPoolManager;
import com.netflix.spectator.api.BasicTag;
import com.netflix.spectator.api.Tag;
import javax.inject.Inject;
import javax.inject.Singleton;
@Singleton
public class VersionTracker implements Runnable {
private static final Logger log = LoggerFactory.getLogger(VersionTracker.class);
private AtomicLong versionGauge;
private EVCacheClientPoolManager poolManager;
@Inject
public VersionTracker(EVCacheClientPoolManager poolManager) {
this.poolManager = poolManager;
poolManager.getEVCacheScheduledExecutor().schedule(this, 30, TimeUnit.SECONDS);
}
public void run() {
// init the version information
if(versionGauge == null) {
final String fullVersion;
final String jarName;
if(this.getClass().getPackage().getImplementationVersion() != null) {
fullVersion = this.getClass().getPackage().getImplementationVersion();
} else {
fullVersion = "unknown";
}
if(this.getClass().getPackage().getImplementationTitle() != null) {
jarName = this.getClass().getPackage().getImplementationTitle();
} else {
jarName = "unknown";
}
if(log.isInfoEnabled()) log.info("fullVersion : " + fullVersion + "; jarName : " + jarName);
final List<Tag> tagList = new ArrayList<Tag>(3);
tagList.add(new BasicTag("version", fullVersion));
tagList.add(new BasicTag("jarName", jarName));
versionGauge = EVCacheMetricsFactory.getInstance().getLongGauge("evcache-client", tagList);
}
versionGauge.set(1L);
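// Re-arm the task so the version gauge keeps being refreshed every 30 seconds.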
poolManager.getEVCacheScheduledExecutor().schedule(this, 30, TimeUnit.SECONDS);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public boolean equals(Object obj) {
return (obj != null) && (obj.getClass() == getClass());
}
}
| 828 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/pool/DIEVCacheKetamaNodeLocatorConfiguration.java
|
package com.netflix.evcache.pool;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.discovery.EurekaClient;
import com.netflix.discovery.shared.Application;
import net.spy.memcached.MemcachedNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.List;
public class DIEVCacheKetamaNodeLocatorConfiguration extends EVCacheKetamaNodeLocatorConfiguration {
private static final Logger log = LoggerFactory.getLogger(DIEVCacheKetamaNodeLocatorConfiguration.class);
private final EurekaClient eurekaClient;
public DIEVCacheKetamaNodeLocatorConfiguration(EVCacheClient client, EurekaClient eurekaClient) {
super(client);
this.eurekaClient = eurekaClient;
}
/**
* Returns the socket address of a given MemcachedNode.
*
* @param node - The MemcachedNode which we're interested in
* @return The socket address of the given node, in the format
*         "publicHostname/privateIp:port" (e.g.
*         ec2-174-129-159-31.compute-1.amazonaws.com/10.125.47.114:11211)
*/
@Override
public String getKeyForNode(MemcachedNode node, int repetition) {
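// Keys are memoized per node; only the repetition suffix differs across calls,
// so the (possibly expensive) Eureka lookup happens once per MemcachedNode.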
String result = socketAddresses.get(node);
if(result == null) {
final SocketAddress socketAddress = node.getSocketAddress();
if(socketAddress instanceof InetSocketAddress) {
final InetSocketAddress isa = (InetSocketAddress)socketAddress;
if(eurekaClient != null ) {
final Application app = eurekaClient.getApplication(client.getAppName());
if(app != null) {
final List<InstanceInfo> instances = app.getInstances();
for(InstanceInfo info : instances) {
final String hostName = info.getHostName();
if(hostName.equalsIgnoreCase(isa.getHostName())) {
final String ip = info.getIPAddr();
result = hostName + '/' + ip + ":11211";
break;
}
}
} else {
result = isa.getHostName() + '/' + isa.getAddress().getHostAddress() + ":11211";
}
} else {
result = isa.getHostName() + '/' + isa.getAddress().getHostAddress() + ":11211";
}
} else {
result = String.valueOf(socketAddress);
if (result.startsWith("/")) {
result = result.substring(1);
}
}
socketAddresses.put(node, result);
}
if(log.isDebugEnabled()) log.debug("Returning : " + (result + "-" + repetition));
return result + "-" + repetition;
}
@Override
public String toString() {
return "DIEVCacheKetamaNodeLocatorConfiguration [" + super.toString() + ", EurekaClient=" + eurekaClient + "]";
}
}
| 829 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/pool
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/pool/eureka/EurekaNodeListProvider.java
|
package com.netflix.evcache.pool.eureka;
import com.google.common.net.InetAddresses;
import com.netflix.appinfo.AmazonInfo;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.DataCenterInfo;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.appinfo.InstanceInfo.InstanceStatus;
import com.netflix.archaius.api.Property;
import com.netflix.archaius.api.PropertyRepository;
import com.netflix.discovery.EurekaClient;
import com.netflix.discovery.shared.Application;
import com.netflix.evcache.metrics.EVCacheMetricsFactory;
import com.netflix.evcache.pool.EVCacheClientPool;
import com.netflix.evcache.pool.EVCacheNodeList;
import com.netflix.evcache.pool.EVCacheServerGroupConfig;
import com.netflix.evcache.pool.ServerGroup;
import com.netflix.spectator.api.BasicTag;
import com.netflix.spectator.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class EurekaNodeListProvider implements EVCacheNodeList {
private static final Logger log = LoggerFactory.getLogger(EurekaNodeListProvider.class);
private final EurekaClient _eurekaClient;
private PropertyRepository props;
private final ApplicationInfoManager applicationInfoManager;
@SuppressWarnings("rawtypes") // Archaius2 PropertyRepository does not support ParameterizedTypes
private Property<Set> ignoreHosts = null;
public EurekaNodeListProvider(ApplicationInfoManager applicationInfoManager, EurekaClient eurekaClient, PropertyRepository props) {
this.applicationInfoManager = applicationInfoManager;
this._eurekaClient = eurekaClient;
this.props = props;
}
/*
* (non-Javadoc)
*
* @see com.netflix.evcache.pool.EVCacheNodeList#discoverInstances()
*/
@Override
public Map<ServerGroup, EVCacheServerGroupConfig> discoverInstances(String _appName) throws IOException {
final Property<Boolean> ignoreAppEurekaStatus = props.get("evcache.ignoreAppEurekaStatus", Boolean.class).orElse(false);
if (ignoreAppEurekaStatus.get())
log.info("Ignoring the application's Eureka status while initializing the EVCache client.");
if (!ignoreAppEurekaStatus.get() && (applicationInfoManager.getInfo().getStatus() == InstanceStatus.DOWN)) {
log.info("Not initializing evcache client as application eureka status is DOWN. " +
"One can override this behavior by setting evcache.ignoreAppEurekaStatus property to true, scoped to your application.");
return Collections.<ServerGroup, EVCacheServerGroupConfig> emptyMap();
}
/* Get a list of EVCACHE instances from the DiscoveryManager */
final Application app = _eurekaClient.getApplication(_appName);
if (app == null) return Collections.<ServerGroup, EVCacheServerGroupConfig> emptyMap();
final List<InstanceInfo> appInstances = app.getInstances();
final Map<ServerGroup, EVCacheServerGroupConfig> instancesSpecific = new HashMap<ServerGroup, EVCacheServerGroupConfig>();
/* Iterate all the discovered instances to find usable ones */
for (InstanceInfo iInfo : appInstances) {
final DataCenterInfo dcInfo = iInfo.getDataCenterInfo();
if (dcInfo == null) {
if (log.isErrorEnabled()) log.error("Data Center Info is null for appName - " + _appName);
continue;
}
/* Only AWS instances are usable; bypass all others */
if (DataCenterInfo.Name.Amazon != dcInfo.getName() || !(dcInfo instanceof AmazonInfo)) {
log.error("This is not an AWSDataCenter. You will not be able to use Discovery Nodelist Provider. Cannot proceed. " +
"DataCenterInfo : {}; appName - {}. Please use SimpleNodeList provider and specify the server groups manually.",
dcInfo, _appName);
continue;
}
final AmazonInfo amznInfo = (AmazonInfo) dcInfo;
// The check above guarantees this is an AmazonInfo, so the cast is safe
final String zone = amznInfo.get(AmazonInfo.MetaDataKey.availabilityZone);
if(zone == null) {
final List<Tag> tagList = new ArrayList<Tag>(3);
EVCacheMetricsFactory.getInstance().addAppNameTags(tagList, _appName);
tagList.add(new BasicTag(EVCacheMetricsFactory.CONFIG_NAME, EVCacheMetricsFactory.NULL_ZONE));
EVCacheMetricsFactory.getInstance().increment(EVCacheMetricsFactory.CONFIG, tagList);
continue;
}
final String asgName = iInfo.getASGName();
if(asgName == null) {
final List<Tag> tagList = new ArrayList<Tag>(3);
EVCacheMetricsFactory.getInstance().addAppNameTags(tagList, _appName);
tagList.add(new BasicTag(EVCacheMetricsFactory.CONFIG_NAME, EVCacheMetricsFactory.NULL_SERVERGROUP));
EVCacheMetricsFactory.getInstance().increment(EVCacheMetricsFactory.CONFIG, tagList);
continue;
}
final Property<Boolean> asgEnabled = props.get(asgName + ".enabled", Boolean.class).orElse(true);
if (!asgEnabled.get()) {
if(log.isDebugEnabled()) log.debug("ASG " + asgName + " is disabled so ignoring it");
continue;
}
final Map<String, String> metaInfo = iInfo.getMetadata();
final int evcachePort = Integer.parseInt((metaInfo != null && metaInfo.containsKey("evcache.port")) ?
metaInfo.get("evcache.port") : EVCacheClientPool.DEFAULT_PORT);
int port = evcachePort;
final Property<Boolean> isSecure = props.get(asgName + ".use.secure", Boolean.class)
.orElseGet(_appName + ".use.secure")
.orElseGet("evcache.use.secure")
.orElse(false);
if(isSecure.get()) {
port = Integer.parseInt((metaInfo != null && metaInfo.containsKey("evcache.secure.port")) ?
metaInfo.get("evcache.secure.port") : EVCacheClientPool.DEFAULT_SECURE_PORT);
}
final ServerGroup serverGroup = new ServerGroup(zone, asgName);
final Set<InetSocketAddress> instances;
final EVCacheServerGroupConfig config;
if (instancesSpecific.containsKey(serverGroup)) {
config = instancesSpecific.get(serverGroup);
instances = config.getInetSocketAddress();
} else {
instances = new HashSet<InetSocketAddress>();
config = new EVCacheServerGroupConfig(serverGroup, instances);
instancesSpecific.put(serverGroup, config);
//EVCacheMetricsFactory.getInstance().getRegistry().gauge(EVCacheMetricsFactory.getInstance().getRegistry().createId(_appName + "-port", "ServerGroup", asgName, "APP", _appName), Long.valueOf(port));
}
/* Don't try to use downed instances */
final InstanceStatus status = iInfo.getStatus();
if (status == null || InstanceStatus.OUT_OF_SERVICE == status || InstanceStatus.DOWN == status) {
if (log.isDebugEnabled()) log.debug("The Status of the instance in Discovery is " + status + ". App Name : " + _appName + "; Zone : " + zone
+ "; Host : " + iInfo.getHostName() + "; Instance Id - " + iInfo.getId());
continue;
}
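            // Heuristics for deciding whether this client itself runs in AWS: an EC2-style public
            // hostname, a VPC id, or (failing those) a comparison of instance id and IP address.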
final InstanceInfo myInfo = applicationInfoManager.getInfo();
final DataCenterInfo myDC = myInfo.getDataCenterInfo();
final AmazonInfo myAmznDC = (myDC instanceof AmazonInfo) ? (AmazonInfo) myDC : null;
final String myInstanceId = myInfo.getInstanceId();
final String myIp = myInfo.getIPAddr();
final String myPublicHostName = (myAmznDC != null) ? myAmznDC.get(AmazonInfo.MetaDataKey.publicHostname) : null;
boolean isInCloud = false;
if (myPublicHostName != null) {
isInCloud = myPublicHostName.startsWith("ec2");
}
if (!isInCloud) {
if (myAmznDC != null && myAmznDC.get(AmazonInfo.MetaDataKey.vpcId) != null) {
isInCloud = true;
} else {
if (myIp.equals(myInstanceId)) {
isInCloud = false;
}
}
}
final String myZone = (myAmznDC != null) ? myAmznDC.get(AmazonInfo.MetaDataKey.availabilityZone) : null;
final String myRegion = (myZone != null) ? myZone.substring(0, myZone.length() - 1) : null;
final String region = (zone != null) ? zone.substring(0, zone.length() - 1) : null;
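            // e.g. zone "us-east-1a" -> region "us-east-1" (the trailing availability-zone letter is dropped)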
final String host = amznInfo.get(AmazonInfo.MetaDataKey.publicHostname);
InetSocketAddress address = null;
final String vpcId = amznInfo.get(AmazonInfo.MetaDataKey.vpcId);
final String localIp = amznInfo.get(AmazonInfo.MetaDataKey.localIpv4);
if (log.isDebugEnabled()) log.debug("myZone - " + myZone + "; zone : " + zone + "; myRegion : " + myRegion + "; region : " + region + "; host : " + host + "; vpcId : " + vpcId);
if(ignoreHosts == null) ignoreHosts = props.get(_appName + ".ignore.hosts", Set.class).orElse(Collections.emptySet());
if(localIp != null && ignoreHosts.get().contains(localIp)) continue;
if(host != null && ignoreHosts.get().contains(host)) continue;
if (vpcId != null) {
final InetAddress add = InetAddresses.forString(localIp);
final InetAddress inetAddress = InetAddress.getByAddress(localIp, add.getAddress());
address = new InetSocketAddress(inetAddress, port);
if (log.isDebugEnabled()) log.debug("VPC : localIp - " + localIp + " ; add : " + add + "; inetAddress : " + inetAddress + "; address - " + address
+ "; App Name : " + _appName + "; Zone : " + zone + "; myZone - " + myZone + "; Host : " + iInfo.getHostName() + "; Instance Id - " + iInfo.getId());
} else {
if(host != null && host.startsWith("ec2")) {
final InetAddress inetAddress = (localIp != null) ? InetAddress.getByAddress(host, InetAddresses.forString(localIp).getAddress()) : InetAddress.getByName(host);
address = new InetSocketAddress(inetAddress, port);
if (log.isDebugEnabled()) log.debug("myZone - " + myZone + ". host : " + host
+ "; inetAddress : " + inetAddress + "; address - " + address + "; App Name : " + _appName
+ "; Zone : " + zone + "; Host : " + iInfo.getHostName() + "; Instance Id - " + iInfo.getId());
} else {
final String ipToUse = (isInCloud) ? localIp : amznInfo.get(AmazonInfo.MetaDataKey.publicIpv4);
final InetAddress add = InetAddresses.forString(ipToUse);
final InetAddress inetAddress = InetAddress.getByAddress(ipToUse, add.getAddress());
address = new InetSocketAddress(inetAddress, port);
if (log.isDebugEnabled()) log.debug("CLASSIC : IPToUse - " + ipToUse + " ; add : " + add + "; inetAddress : " + inetAddress + "; address - " + address
+ "; App Name : " + _appName + "; Zone : " + zone + "; myZone - " + myZone + "; Host : " + iInfo.getHostName() + "; Instance Id - " + iInfo.getId());
}
}
instances.add(address);
}
return instancesSpecific;
}
}
| 830 |
0 |
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/pool
|
Create_ds/EVCache/evcache-client/src/main/java/com/netflix/evcache/pool/eureka/DIEVCacheNodeListProvider.java
|
package com.netflix.evcache.pool.eureka;
import javax.inject.Inject;
import javax.inject.Provider;
import com.netflix.archaius.api.PropertyRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.discovery.EurekaClient;
import com.netflix.evcache.pool.EVCacheNodeList;
import com.netflix.evcache.pool.SimpleNodeListProvider;
public class DIEVCacheNodeListProvider implements Provider<EVCacheNodeList> {
private static final Logger log = LoggerFactory.getLogger(DIEVCacheNodeListProvider.class);
private final EurekaClient eurekaClient;
    private final PropertyRepository props;
private final ApplicationInfoManager applicationInfoManager;
@Inject
public DIEVCacheNodeListProvider(ApplicationInfoManager applicationInfoManager, EurekaClient eurekaClient, PropertyRepository props) {
this.applicationInfoManager = applicationInfoManager;
this.eurekaClient = eurekaClient;
this.props = props;
}
@Override
public EVCacheNodeList get() {
final EVCacheNodeList provider;
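        // Fall back to a statically configured node list when discovery-based resolution is disabled.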
if (props.get("evcache.use.simple.node.list.provider", Boolean.class).orElse(false).get()) {
provider = new SimpleNodeListProvider();
} else {
provider = new EurekaNodeListProvider(applicationInfoManager, eurekaClient, props);
}
if(log.isDebugEnabled()) log.debug("EVCache Node List Provider : " + provider);
return provider;
}
}
| 831 |
0 |
Create_ds/neptune-export/src/test/java/org/apache/tinkerpop/gremlin
|
Create_ds/neptune-export/src/test/java/org/apache/tinkerpop/gremlin/driver/LBAwareSigV4WebSocketChannelizerTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package org.apache.tinkerpop.gremlin.driver;
import com.amazonaws.services.neptune.auth.HandshakeRequestConfig;
import com.amazonaws.services.neptune.auth.LBAwareAwsSigV4ClientHandshaker;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.embedded.EmbeddedChannel;
import org.apache.tinkerpop.gremlin.driver.handler.WebSocketClientHandler;
import org.junit.Test;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class LBAwareSigV4WebSocketChannelizerTest {
@Test
public void configureShouldAddSigV4HandshakerToPipeline() throws URISyntaxException {
System.setProperty("SERVICE_REGION", "us-west-2");
ChannelPipeline mockedPipeline = new EmbeddedChannel().pipeline();
LBAwareSigV4WebSocketChannelizer channelizer = new LBAwareSigV4WebSocketChannelizer();
Connection mockedConnection = mock(Connection.class);
Cluster mockedCluster = mock(Cluster.class);
when(mockedConnection.getCluster()).thenReturn(mockedCluster);
when(mockedConnection.getUri()).thenReturn(new URI("ws:localhost"));
when(mockedCluster.connectionPoolSettings()).thenReturn(mock(Settings.ConnectionPoolSettings.class));
when(mockedCluster.authProperties()).thenReturn(new AuthProperties().with(AuthProperties.Property.JAAS_ENTRY, new HandshakeRequestConfig(Collections.emptyList(), 8182, false).value()));
channelizer.init(mockedConnection);
channelizer.configure(mockedPipeline);
ChannelHandler handler = mockedPipeline.get(LBAwareSigV4WebSocketChannelizer.WEB_SOCKET_HANDLER);
assertTrue(handler instanceof WebSocketClientHandler);
assertTrue(((WebSocketClientHandler) handler).handshaker() instanceof LBAwareAwsSigV4ClientHandshaker);
}
}
| 832 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/AbstractExportIntegrationTest.java
|
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
public abstract class AbstractExportIntegrationTest {
protected static String neptuneEndpoint;
protected File outputDir;
@Rule
public TemporaryFolder tempFolder = new TemporaryFolder();
@BeforeClass
public static void setupClass(){
neptuneEndpoint = System.getenv("NEPTUNE_ENDPOINT");
assertNotNull("endpoint must be provided through \"NEPTUNE_ENDPOINT\" environment variable", neptuneEndpoint);
fillDbWithTestData(neptuneEndpoint);
}
@Before
public void setup() throws IOException {
outputDir = tempFolder.newFolder();
}
private static void fillDbWithTestData(final String neptuneEndpoint) {
//TODO:: For now assume that correct data is pre-loaded into DB.
// Cluster cluster = Cluster.build(neptuneEndpoint).enableSsl(true).create();
// GraphTraversalSource g = traversal().withRemote(DriverRemoteConnection.using(cluster, "g"));
}
protected void assertEquivalentResults(final File expected, final File actual) {
        GraphSchema config;
        try {
            config = new JsonResource<GraphSchema, Boolean>(
                    "Config file",
                    new URI(expected.getPath() + "/config.json"),
                    GraphSchema.class).get();
        } catch (IOException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
assertTrue("stats.json does not match expected results", areJsonContentsEqual(expected.listFiles((dir, name) -> name.equals("stats.json"))[0], actual.listFiles((dir, name) -> name.equals("stats.json"))[0]));
assertTrue("config.json does not match expected results", areJsonContentsEqual(expected.listFiles((dir, name) -> name.equals("config.json"))[0], actual.listFiles((dir, name) -> name.equals("config.json"))[0]));
if (expected.listFiles(((dir, name) -> name.equals("nodes"))).length >= 1) {
assertTrue("nodes directory does not match expected results", areDirContentsEquivalent(expected + "/nodes", actual + "/nodes", config));
}
if (expected.listFiles(((dir, name) -> name.equals("edges"))).length >= 1) {
assertTrue("edges directory does not match expected results", areDirContentsEquivalent(expected + "/edges", actual + "/edges", config));
}
}
protected boolean areJsonContentsEqual(final File expected, final File actual) {
final ObjectMapper mapper = new ObjectMapper();
try {
JsonNode expectedTree = mapper.readTree(expected);
JsonNode actualTree = mapper.readTree(actual);
return expectedTree.equals(actualTree);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
protected boolean areDirContentsEquivalent(final String expectedPath, final String actualPath, final GraphSchema config) {
final File expectedDir = new File(expectedPath);
final File actualDir = new File(actualPath);
assertTrue("Expected path to a directory", expectedDir.isDirectory() && actualDir.isDirectory());
GraphElementSchemas schemas;
if(expectedDir.getName().equals("nodes")) {
if(!config.hasNodeSchemas()) {
return true;
}
schemas = config.graphElementSchemasFor(GraphElementType.nodes);
} else if(expectedDir.getName().equals("edges")) {
if(!config.hasEdgeSchemas()) {
return true;
}
schemas = config.graphElementSchemasFor(GraphElementType.edges);
} else {
throw new IllegalArgumentException("directory must end in either /nodes or /edges");
}
for(Label l : schemas.labels()) {
String label = l.fullyQualifiedLabel();
if(!areLabelledDirContentsEquivalent(expectedDir, actualDir, label)) {
return false;
}
}
return true;
}
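    // Labels such as "(person)-route-(airport)" are percent-encoded in output file names
    // ("(" -> %28, ")" -> %29), so the escaped label is used as the file-name prefix when
    // matching the CSV files for a given label.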
protected boolean areLabelledDirContentsEquivalent(final File expectedDir, final File actualDir, final String label) {
final String escapedLabel = label.replaceAll("\\(", "%28").replaceAll("\\)", "%29");
final List<String> expectedNodes = new ArrayList<>();
final List<String> actualNodes = new ArrayList<>();
for(File file : expectedDir.listFiles((dir, name) -> name.startsWith(escapedLabel))){
try {
CSVParser parser = CSVParser.parse(file, StandardCharsets.UTF_8, CSVFormat.RFC4180);
Collection<String> list = parser.stream()
.map(csvRecord -> csvRecord.toList().toString())
.collect(Collectors.toList());
expectedNodes.addAll(list);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
for(File file : actualDir.listFiles((dir, name) -> name.startsWith(escapedLabel))){
try {
CSVParser parser = CSVParser.parse(file, StandardCharsets.UTF_8, CSVFormat.RFC4180);
Collection<String> list = parser.stream()
.map(csvRecord -> csvRecord.toList().toString())
.collect(Collectors.toList());
actualNodes.addAll(list);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return expectedNodes.containsAll(actualNodes) && actualNodes.containsAll(expectedNodes);
}
}
| 833 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/CreatePgConfigIntegrationTest.java
|
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportRunner;
import org.junit.Test;
import java.io.File;
public class CreatePgConfigIntegrationTest extends AbstractExportIntegrationTest{
@Test
public void testCreatePgConfig() {
final String[] command = {"create-pg-config", "-e", neptuneEndpoint, "-d", outputDir.getPath()};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testCreatePgConfig"), resultDir);
}
@Test
public void testCreatePgConfigWithGremlinFilter() {
final String[] command = {"create-pg-config", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--gremlin-filter", "has(\"runways\", 2)"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testCreatePgConfigWithGremlinFilter"), resultDir);
}
@Test
public void testCreatePgConfigWithEdgeGremlinFilter() {
final String[] command = {"create-pg-config", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--gremlin-filter", "hasLabel(\"route\")"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testCreatePgConfigWithEdgeGremlinFilter"), resultDir);
}
@Test
public void testCreatePgConfigWithEdgeGremlinFilterAndEarlyGremlinFilter() {
final String[] command = {"create-pg-config", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--gremlin-filter", "hasLabel(\"route\")", "--filter-edges-early"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testCreatePgConfigWithEdgeGremlinFilter"), resultDir);
}
}
| 834 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/ExportPgFromQueriesIntegrationTest.java
|
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportRunner;
import com.amazonaws.services.neptune.propertygraph.io.JsonResource;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import static org.junit.Assert.assertTrue;
public class ExportPgFromQueriesIntegrationTest extends AbstractExportIntegrationTest{
@Test
public void testExportPgFromQueries() {
final String[] command = {"export-pg-from-queries", "-e", neptuneEndpoint,
"-d", outputDir.getPath(),
"-q", "airport=g.V().hasLabel('airport').has('runways', gt(2)).project('code', 'runways', 'city', 'country').by('code').by('runways').by('city').by('country')"
};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportPgFromQueries"), resultDir);
}
@Test
public void testExportPgFromQueriesWithStructuredOutput() {
final String[] command = {"export-pg-from-queries", "-e", neptuneEndpoint,
"-d", outputDir.getPath(),
"-q", "airport=g.V().union(hasLabel('airport'), outE()).elementMap()",
"--include-type-definitions",
"--structured-output"
};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentStructuredOutput(new File("src/test/resources/IntegrationTest/testExportPgFromQueriesStructuredOutput"), resultDir);
}
@Test
public void testExportPgFromQueriesWithStructuredOutputWithEdgeAndVertexLabels() {
final String[] command = {"export-pg-from-queries", "-e", neptuneEndpoint,
"-d", outputDir.getPath(),
"-q", "airport=g.V().union(hasLabel('airport'), outE()).elementMap()",
"--include-type-definitions", "--edge-label-strategy", "edgeAndVertexLabels",
"--structured-output"
};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentStructuredOutput(new File("src/test/resources/IntegrationTest/testExportPgFromQueriesStructuredOutput"), resultDir);
}
@Override
protected void assertEquivalentResults(final File expected, final File actual) {
assertTrue("queries.json does not match expected results", areJsonContentsEqual(expected.listFiles((dir, name) -> name.equals("queries.json"))[0], actual.listFiles((dir, name) -> name.equals("queries.json"))[0]));
for (File expectedResultsDir : expected.listFiles((dir, name) -> name.equals("results"))[0].listFiles()) {
assertTrue(expectedResultsDir.isDirectory());
String dirName = expectedResultsDir.getName();
assertTrue("results/"+dirName+" directory does not match expected results", areLabelledDirContentsEquivalent(expectedResultsDir, new File(actual+"/results/"+dirName), dirName));
}
}
private void assertEquivalentStructuredOutput(final File expected, final File actual) {
        GraphSchema config;
        try {
            config = new JsonResource<GraphSchema, Boolean>(
                    "Config file",
                    new URI(expected.getPath() + "/config.json"),
                    GraphSchema.class).get();
        } catch (IOException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
if (expected.listFiles(((dir, name) -> name.equals("nodes"))).length >= 1) {
assertTrue("nodes directory does not match expected results", areDirContentsEquivalent(expected + "/nodes", actual + "/nodes", config));
}
}
}
| 835 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/ExportPgFromConfigIntegrationTest.java
|
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportRunner;
import org.junit.Test;
import java.io.File;
public class ExportPgFromConfigIntegrationTest extends AbstractExportIntegrationTest{
@Test
public void testExportPgFromConfig() {
final String[] command = {"export-pg-from-config", "-e", neptuneEndpoint,
"-c", "src/test/resources/IntegrationTest/ExportPgFromConfigIntegrationTest/input/config.json",
"-d", outputDir.getPath()};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/ExportPgFromConfigIntegrationTest/testExportPgFromConfig"), resultDir);
}
@Test
public void testExportPgFromConfigWithGremlinFilter() {
final String[] command = {"export-pg-from-config", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"-c", "src/test/resources/IntegrationTest/ExportPgFromConfigIntegrationTest/input/config.json",
"--gremlin-filter", "has(\"runways\", 2)"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportPgToCsvWithGremlinFilter"), resultDir);
}
@Test
public void testExportEdgesFromConfigWithGremlinFilter() {
final String[] command = {"export-pg-from-config", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"-c", "src/test/resources/IntegrationTest/ExportPgFromConfigIntegrationTest/input/config.json",
"--gremlin-filter", "hasLabel(\"route\")"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportEdgesToCsvWithGremlinFilter"), resultDir);
}
@Test
public void testExportEdgesFromConfigWithGremlinFilterWithEarlyGremlinFilter() {
final String[] command = {"export-pg-from-config", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"-c", "src/test/resources/IntegrationTest/ExportPgFromConfigIntegrationTest/input/config.json",
"--gremlin-filter", "hasLabel(\"route\")", "--filter-edges-early"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportEdgesToCsvWithGremlinFilter"), resultDir);
}
}
| 836 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/ExportPgIntegrationTest.java
|
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportRunner;
import org.junit.Test;
import java.io.File;
public class ExportPgIntegrationTest extends AbstractExportIntegrationTest{
@Test
public void testExportPgToCsv() {
final String[] command = {"export-pg", "-e", neptuneEndpoint, "-d", outputDir.getPath()};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportPgToCsv"), resultDir);
}
@Test
public void testExportPgWithEdgeAndVertexLabels() {
final String[] command = {"export-pg", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--edge-label-strategy", "edgeAndVertexLabels"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportPgWithEdgeAndVertexLabels"), resultDir);
}
@Test
public void testExportPgToCsvWithJanus() {
final String[] command = {"export-pg", "-e", neptuneEndpoint, "-d", outputDir.getPath(), "--janus"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportPgToCsv"), resultDir);
}
@Test
public void testExportPgToCsvWithGremlinFilter() {
final String[] command = {"export-pg", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--gremlin-filter", "has(\"runways\", 2)"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportPgToCsvWithGremlinFilter"), resultDir);
}
@Test
public void testExportEdgesToCsvWithGremlinFilter() {
final String[] command = {"export-pg", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--gremlin-filter", "hasLabel(\"route\")"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportEdgesToCsvWithGremlinFilter"), resultDir);
}
@Test
public void testExportEdgesToCsvWithGremlinFilterWithEarlyGremlinFilter() {
final String[] command = {"export-pg", "-e", neptuneEndpoint, "-d", outputDir.getPath(),
"--gremlin-filter", "hasLabel(\"route\")", "--filter-edges-early"};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertEquivalentResults(new File("src/test/resources/IntegrationTest/testExportEdgesToCsvWithGremlinFilter"), resultDir);
}
}
| 837 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/ExportRdfIntegrationTest.java
|
package com.amazonaws.services.neptune;
import com.amazonaws.services.neptune.export.NeptuneExportRunner;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFParser;
import org.eclipse.rdf4j.rio.Rio;
import org.eclipse.rdf4j.rio.helpers.StatementCollector;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
public class ExportRdfIntegrationTest extends AbstractExportIntegrationTest{
@Test
public void testExportRdf() {
final String[] command = {"export-rdf", "-e", neptuneEndpoint, "-d", outputDir.getPath()};
final NeptuneExportRunner runner = new NeptuneExportRunner(command);
runner.run();
final File resultDir = outputDir.listFiles()[0];
assertTrue("Returned statements don't match expected", areStatementsEqual("src/test/resources/IntegrationTest/testExportRdf/statements/statements.ttl", resultDir+"/statements/statements.ttl"));
}
    private boolean areStatementsEqual(final String expected, final String actual) {
        final ArrayList<Statement> expectedStatements = new ArrayList<>();
        final ArrayList<Statement> actualStatements = new ArrayList<>();
        final RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
        rdfParser.setRDFHandler(new StatementCollector(expectedStatements));
        try {
            rdfParser.parse(new FileInputStream(expected));
        }
        catch (Exception e) {
            // Rethrow rather than swallow: if parsing failed silently, both lists would stay
            // empty and the comparison below would pass vacuously.
            throw new RuntimeException(e);
        }
        rdfParser.setRDFHandler(new StatementCollector(actualStatements));
        try {
            rdfParser.parse(new FileInputStream(actual));
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
        return expectedStatements.containsAll(actualStatements) && actualStatements.containsAll(expectedStatements);
}
}
| 838 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/cluster/InstanceTypeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.cluster;
import org.junit.Test;
import static org.junit.Assert.*;
public class InstanceTypeTest {
@Test
    public void shouldAllowNameWithOrWithoutDBPrefix(){
assertEquals(InstanceType.db_r5_8xlarge, InstanceType.parse("db.r5.8xlarge"));
assertEquals(InstanceType.db_r5_8xlarge, InstanceType.parse("r5.8xlarge"));
assertEquals(InstanceType.db_r5d_12xlarge, InstanceType.parse("db.r5d.12xlarge"));
assertEquals(InstanceType.db_r5d_12xlarge, InstanceType.parse("r5d.12xlarge"));
}
@Test
public void shouldRecognizeR6gInstanceTypePrefix(){
assertEquals(InstanceType.parse("db.r6g.16xlarge").concurrency(), 128);
}
}
| 839 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/cluster/GetLastEventIdTest.java
|
package com.amazonaws.services.neptune.cluster;
import org.junit.Assert;
import org.junit.Test;
public class GetLastEventIdTest {
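    // Engine versions up to and including 1.0.4.1 cap the commit number at Integer.MAX_VALUE;
    // versions 1.0.4.2 and later cap it at Long.MAX_VALUE (the boundary both tests exercise).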
@Test
public void shouldReturnIntegerMaxValueForEngineVersions1041AndBelow(){
String expectedValue = String.valueOf(Integer.MAX_VALUE);
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.1.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.1.1"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.1.2"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.2.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.2.1"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.2.2"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.3.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.4.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.4.1"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.4.2"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.5.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.5.1"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.1.0.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.1.1.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.2.0.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.2.0.1"));
}
@Test
    public void shouldReturnLongMaxValueForEngineVersionsAbove1041(){
String expectedValue = String.valueOf(Long.MAX_VALUE);
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.1.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.1.1"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.1.2"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.2.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.2.1"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.2.2"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.3.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.4.0"));
Assert.assertNotEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.4.1"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.4.2"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.5.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.0.5.1"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.1.0.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.1.1.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.2.0.0"));
Assert.assertEquals(expectedValue, GetLastEventId.MaxCommitNumValueForEngine("1.2.0.1"));
}
}
| 840 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/util/MapUtils.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
import java.util.HashMap;
import java.util.Map;
public class MapUtils {
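    // Minimal usage sketch (values are illustrative only):
    //   Map<?, ?> m = MapUtils.map(MapUtils.entry("code", "JFK"), MapUtils.entry("runways", 4));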
public static Map<?, ?> map(Entry... entries) {
HashMap<Object, Object> map = new HashMap<>();
for (Entry entry : entries) {
map.put(entry.key(), entry.value());
}
return map;
}
public static Entry entry(String key, Object value) {
return new Entry(key, value);
}
public static class Entry {
private final String key;
private final Object value;
private Entry(String key, Object value) {
this.key = key;
this.value = value;
}
public String key() {
return key;
}
public Object value() {
return value;
}
}
}
| 841 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/util/AWSCredentialsUtilTest.java
|
package com.amazonaws.services.neptune.util;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.securitytoken.model.AWSSecurityTokenServiceException;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
import static com.amazonaws.services.neptune.util.AWSCredentialsUtil.getProfileCredentialsProvider;
import static com.amazonaws.services.neptune.util.AWSCredentialsUtil.getSTSAssumeRoleCredentialsProvider;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public class AWSCredentialsUtilTest {
TemporaryFolder tempFolder;
File credentialsFile;
@Before
public void setup() throws IOException {
tempFolder = new TemporaryFolder();
tempFolder.create();
credentialsFile = tempFolder.newFile("credentialsFile");
}
@Test
public void shouldGetDefaultCredsIfConfigIsNull() {
AWSCredentialsProvider provider = getProfileCredentialsProvider(null, null);
assertTrue(provider instanceof DefaultAWSCredentialsProviderChain);
}
@Test
public void shouldAttemptToUseProvidedPath() {
Throwable t = assertThrows(IllegalArgumentException.class, () -> getProfileCredentialsProvider(
null, tempFolder.getRoot().getAbsolutePath()+"/non-existent-file").getCredentials());
assertEquals("AWS credential profiles file not found in the given path: "+
tempFolder.getRoot().getAbsolutePath()+"/non-existent-file", t.getMessage());
}
@Test
public void shouldUseDefaultCredsIfProfileNameNull() {
Throwable t = assertThrows(IllegalArgumentException.class, () -> getProfileCredentialsProvider(
null, credentialsFile.getAbsolutePath()).getCredentials());
assertTrue(t.getMessage().contains("No AWS profile named 'default'"));
}
@Test
public void shouldAttemptToUseProvidedProfileName() {
Throwable t = assertThrows(IllegalArgumentException.class, () -> getProfileCredentialsProvider(
"test", credentialsFile.getAbsolutePath()).getCredentials());
assertTrue(t.getMessage().contains("No AWS profile named 'test'"));
}
@Test
public void shouldUseSourceCredsProviderWhenAssumingRole() {
AWSCredentialsProvider mockSourceCredsProvider = mock(AWSCredentialsProvider.class);
try {
getSTSAssumeRoleCredentialsProvider("fakeARN", "sessionName", null, mockSourceCredsProvider, "us-west-2")
.getCredentials();
}
catch (AWSSecurityTokenServiceException e) {} //Expected to fail as sourceCredsProvider does not have permission to assume role
Mockito.verify(mockSourceCredsProvider, Mockito.atLeast(1)).getCredentials();
}
}
| 842 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/util/TransferManagerWrapperTest.java
|
package com.amazonaws.services.neptune.util;
import com.amazonaws.SdkClientException;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import org.junit.Test;
import org.mockito.internal.verification.AtLeast;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TransferManagerWrapperTest {
private final String REGION = "us-west-2";
@Test
public void shouldHandleNullCredentialsProvider() {
TransferManagerWrapper wrapper = new TransferManagerWrapper(REGION, null);
assertNotNull(wrapper);
assertNotNull(wrapper.get());
assertNotNull(wrapper.get().getAmazonS3Client());
}
@Test
public void shouldUseProvidedCredentials() {
AWSCredentialsProvider mockCredentialsProvider = mock(AWSCredentialsProvider.class);
when(mockCredentialsProvider.getCredentials()).thenReturn(new AnonymousAWSCredentials());
TransferManagerWrapper wrapper = new TransferManagerWrapper(REGION, mockCredentialsProvider);
assertNotNull(wrapper);
assertNotNull(wrapper.get());
assertNotNull(wrapper.get().getAmazonS3Client());
//Expected to fail due to invalid credentials. This call is here to force the S3 client to call getCredentials()
try {
wrapper.get().getAmazonS3Client().listBuckets();
}
catch (SdkClientException e) {}
verify(mockCredentialsProvider, new AtLeast(1)).getCredentials();
}
}
| 843 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/util/SemicolonUtilsTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
import org.junit.Test;
import java.util.Collection;
import java.util.Iterator;
import static org.junit.Assert.*;
public class SemicolonUtilsTest {
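    // SemicolonUtils.split breaks a string on unescaped semicolons; a backslash-escaped
    // semicolon ("\\;") survives the split, and SemicolonUtils.unescape turns it back into
    // a literal semicolon.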
@Test
public void shouldSplitStringOnSemicolons(){
Collection<String> results = SemicolonUtils.split("abc;def;ghi");
assertEquals(3, results.size());
Iterator<String> iterator = results.iterator();
assertEquals("abc", iterator.next());
assertEquals("def", iterator.next());
assertEquals("ghi", iterator.next());
}
@Test
public void shouldNotSplitOnEscapedSemicolon(){
Collection<String> results = SemicolonUtils.split("abc;d\\;ef;ghi");
assertEquals(3, results.size());
Iterator<String> iterator = results.iterator();
assertEquals("abc", iterator.next());
assertEquals("d\\;ef", iterator.next());
assertEquals("ghi", iterator.next());
}
@Test
public void shouldUnescapeEscapedSemicolonIfThereAreNoUnescapedSemicolonsInString(){
assertEquals("d;ef", SemicolonUtils.unescape("d\\;ef"));
}
@Test
public void shouldReturnEmptyCollectionForEmptyString(){
Collection<String> collection = SemicolonUtils.split("");
assertEquals(0, collection.size());
}
}
| 844 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/util/S3ObjectInfoTest.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.util;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.SSEAlgorithm;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class S3ObjectInfoTest {
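    // S3ObjectInfo parses s3://bucket/key URIs. replaceOrAppendKey substitutes a placeholder
    // within the key when one is present, and otherwise appends the supplied suffix (both
    // behaviours are exercised below).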
@Test
public void canParseBucketFromURI(){
String s3Uri = "s3://my-bucket/a/b/c";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("my-bucket", s3ObjectInfo.bucket());
}
@Test
public void canParseKeyWithoutTrailingSlashFromURI(){
String s3Uri = "s3://my-bucket/a/b/c";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/c", s3ObjectInfo.key());
}
@Test
    public void canParseKeyWithTrailingSlashFromURI(){
String s3Uri = "s3://my-bucket/a/b/c/";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/c/", s3ObjectInfo.key());
}
@Test
public void canCreateDownloadFileForKeyWithoutTrailingSlash(){
String s3Uri = "s3://my-bucket/a/b/c.txt";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("/temp/c.txt", s3ObjectInfo.createDownloadFile("/temp").getAbsolutePath());
}
@Test
public void canCreateDownloadFileForKeyWithTrailingSlash(){
String s3Uri = "s3://my-bucket/a/b/c/";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("/temp/c", s3ObjectInfo.createDownloadFile("/temp").getAbsolutePath());
}
@Test
public void canCreateNewInfoForKeyWithoutTrailingSlash() {
String s3Uri = "s3://my-bucket/a/b/c";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/c/dir", s3ObjectInfo.withNewKeySuffix("dir").key());
}
@Test
public void canCreateNewKeyForKeyWithTrailingSlash() {
String s3Uri = "s3://my-bucket/a/b/c/";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/c/dir", s3ObjectInfo.withNewKeySuffix("dir").key());
}
@Test
public void canReplacePlaceholderInKey() {
String s3Uri = "s3://my-bucket/a/b/_COMPLETION_ID_/manifest.json";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/123/manifest.json", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123").key());
}
@Test
public void canReplaceTmpPlaceholderInKey() {
String s3Uri = "s3://my-bucket/a/b/tmp/manifest.json";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/failed/manifest.json", s3ObjectInfo.replaceOrAppendKey("/tmp/", "/failed/").key());
}
@Test
public void canAppendSuffixIfNoPlaceholder() {
String s3Uri = "s3://my-bucket/a/b/";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/123", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123").key());
}
@Test
public void canAppendAltSuffixIfNoPlaceholder() {
String s3Uri = "s3://my-bucket/a/b/";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("a/b/123.json", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123", "123.json").key());
}
@Test
public void canHandlePathsWithBucketNameOnlyNoSlash(){
String s3Uri = "s3://my-bucket";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("", s3ObjectInfo.key());
assertEquals("s3://my-bucket/new-suffix", s3ObjectInfo.withNewKeySuffix("new-suffix").toString());
assertEquals("new-suffix", s3ObjectInfo.withNewKeySuffix("new-suffix").key());
assertEquals("/123", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123").key());
assertEquals("/123.json", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123", "123.json").key());
}
@Test
public void canHandlePathsWithBucketNameWithSlash(){
String s3Uri = "s3://my-bucket/";
S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(s3Uri);
assertEquals("", s3ObjectInfo.key());
assertEquals("s3://my-bucket/new-suffix", s3ObjectInfo.withNewKeySuffix("new-suffix").toString());
assertEquals("new-suffix", s3ObjectInfo.withNewKeySuffix("new-suffix").key());
assertEquals("/123", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123").key());
assertEquals("/123.json", s3ObjectInfo.replaceOrAppendKey("_COMPLETION_ID_", "123", "123.json").key());
}
@Test
public void canSetContentLengthAndDefaultEncryptionTypeProperlyWithEmptyKey(){
long testLength = 100;
String testKeyId = "";
ObjectMetadata objectMetadata = S3ObjectInfo.createObjectMetadata(testLength, testKeyId);
assertEquals(testLength, objectMetadata.getContentLength());
assertEquals(SSEAlgorithm.AES256.getAlgorithm(), objectMetadata.getSSEAlgorithm());
assertNull(objectMetadata.getSSEAwsKmsKeyId());
}
@Test
public void canSetContentLengthAndDefaultEncryptionTypeProperlyWithBlankKey(){
long testLength = 100;
String testKeyId = " ";
ObjectMetadata objectMetadata = S3ObjectInfo.createObjectMetadata(testLength, testKeyId);
assertEquals(testLength, objectMetadata.getContentLength());
assertEquals(SSEAlgorithm.AES256.getAlgorithm(), objectMetadata.getSSEAlgorithm());
assertNull(objectMetadata.getSSEAwsKmsKeyId());
}
@Test
public void canSetContentLengthAndDefaultEncryptionTypeProperlyWithNullKey(){
long testLength = 100;
ObjectMetadata objectMetadata = S3ObjectInfo.createObjectMetadata(testLength, null);
assertEquals(testLength, objectMetadata.getContentLength());
assertEquals(SSEAlgorithm.AES256.getAlgorithm(), objectMetadata.getSSEAlgorithm());
assertNull(objectMetadata.getSSEAwsKmsKeyId());
}
@Test
public void canSetContentLengthAndKmsEncryptionTypeProperlyWithCmkKey(){
long testLength = 100;
String testKeyId = "abcdefgh-hijk-0123-4567-0123456789ab";
ObjectMetadata objectMetadata = S3ObjectInfo.createObjectMetadata(testLength, testKeyId);
assertEquals(testLength, objectMetadata.getContentLength());
assertEquals(SSEAlgorithm.KMS.getAlgorithm(), objectMetadata.getSSEAlgorithm());
assertEquals(testKeyId, objectMetadata.getSSEAwsKmsKeyId());
}
}
| 845 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/io/RecordSplitterTest.java
|
package com.amazonaws.services.neptune.io;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import static org.junit.Assert.assertEquals;
public class RecordSplitterTest {
private final LargeStreamRecordHandlingStrategy STRATEGY = LargeStreamRecordHandlingStrategy.splitAndShred;
@Test
public void shouldSplitStringByLength(){
String s = "abcdefghijklmno";
assertStringCollections(Collections.singletonList("abcdefghijklmno"), RecordSplitter.splitByLength(s, 15));
assertStringCollections(Collections.singletonList("abcdefghijklmno"), RecordSplitter.splitByLength(s, 20));
assertStringCollections(Arrays.asList("abcde", "fghij", "klmno"), RecordSplitter.splitByLength(s, 5));
assertStringCollections(Arrays.asList("abcdef", "ghijkl", "mno"), RecordSplitter.splitByLength(s, 6));
}
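    // The three-argument splitByLength overload takes a word-boundary hint (here 4): chunks
    // prefer to end on whitespace, and leading/trailing whitespace is trimmed (behaviour
    // inferred from the expected outputs below).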
@Test
public void shouldSplitStringAttemptToSplitOnWordBoundary(){
String s = " abc defghij klmno ";
assertStringCollections(Collections.singletonList("abc defghij klmno"), RecordSplitter.splitByLength(s, 19, 4));
assertStringCollections(Collections.singletonList("abc defghij klmno"), RecordSplitter.splitByLength(s, 24, 4));
assertStringCollections(Arrays.asList("abc", "defgh", "ij", "klmno"), RecordSplitter.splitByLength(s, 5));
assertStringCollections(Arrays.asList("abc", "defghi", "j", "klmno"), RecordSplitter.splitByLength(s, 6, 4));
}
@Test
public void shouldSplitIntoIndividualNeptuneStreamsPGRecords() throws IOException {
TestFixture testFixture = new TestFixture("t1.json", getClass());
RecordSplitter recordSplitter = new RecordSplitter(160, STRATEGY);
Collection<String> records = recordSplitter.split(testFixture.input());
assertStringCollections(testFixture.expectedOutputs(), records);
}
@Test
public void shouldSubdivideLongPGRecords() throws IOException {
TestFixture testFixture = new TestFixture("t2.json", getClass());
RecordSplitter recordSplitter = new RecordSplitter(160, STRATEGY);
Collection<String> records = recordSplitter.split(testFixture.input());
assertStringCollections(testFixture.expectedOutputs(), records);
}
@Test
public void shouldSplitCsvIntoIndividualFields() throws IOException {
TestFixture testFixture = new TestFixture("t3.json", getClass());
RecordSplitter recordSplitter = new RecordSplitter(160, STRATEGY);
Collection<String> records = recordSplitter.split(testFixture.input());
assertStringCollections(testFixture.expectedOutputs(), records);
}
@Test
public void shouldSubdivideLongCsvFields() throws IOException {
TestFixture testFixture = new TestFixture("t4.json", getClass());
RecordSplitter recordSplitter = new RecordSplitter(8, STRATEGY);
Collection<String> records = recordSplitter.split(testFixture.input());
assertStringCollections(testFixture.expectedOutputs(), records);
}
@Test
public void shouldSplitIntoIndividualNeptuneStreamsRDFRecords() throws IOException {
TestFixture testFixture = new TestFixture("t5.json", getClass());
RecordSplitter recordSplitter = new RecordSplitter(160, STRATEGY);
Collection<String> records = recordSplitter.split(testFixture.input());
assertStringCollections(testFixture.expectedOutputs(), records);
}
@Test
public void shouldSubdivideLongRDFRecords() throws IOException {
TestFixture testFixture = new TestFixture("t6.json", getClass());
RecordSplitter recordSplitter = new RecordSplitter(140, STRATEGY);
Collection<String> records = recordSplitter.split(testFixture.input());
assertStringCollections(testFixture.expectedOutputs(), records);
}
private void assertStringCollections(Collection<String> expectedOutputs, Collection<String> records) {
String msg = String.format("Expected: %s\nActual: %s", expectedOutputs, records);
assertEquals(msg, expectedOutputs.size(), records.size());
Iterator<String> expectedIterator = expectedOutputs.iterator();
Iterator<String> recordsIterator = records.iterator();
while (expectedIterator.hasNext()) {
String expected = expectedIterator.next();
String actual = recordsIterator.next();
assertEquals(expected, actual);
}
}
private static class TestFixture {
private final String input;
private final Collection<String> expectedOutputs = new ArrayList<>();
public TestFixture(String filename, Class<?> clazz) throws IOException {
JsonNode json = JsonFromResource.get(filename, clazz);
this.input = json.get("input").toString();
ArrayNode output = (ArrayNode) json.get("output");
for (JsonNode jsonNode : output) {
expectedOutputs.add(jsonNode.toString());
}
}
public String input() {
return input;
}
public Collection<String> expectedOutputs() {
return expectedOutputs;
}
}
}
| 846 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/io/DirectoriesTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.io;
import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.*;
public class DirectoriesTest {
@Test
public void replacesForbiddenCharactersInFilename() throws UnsupportedEncodingException {
String filename = "(Person;Staff;Temp\\;Holidays)-works_for-(Admin;Perm;Person)";
String updated = Directories.fileName(filename, new AtomicInteger());
assertEquals("%28Person%3BStaff%3BTemp%5C%3BHolidays%29-works_for-%28Admin%3BPerm%3BPerson%29-1", updated);
}
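    // File names that exceed a practical length limit are replaced with a 40-hex-character
    // digest (SHA-1, judging by the length of the expected value below).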
@Test
public void createsDigestFilePathsForVeryLongFilenames() throws IOException {
Path path = Paths.get("/export");
String longName = "abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890";
Directories directories = Directories.createFor(DirectoryStructure.PropertyGraph, new File("home"), "export-id", "", "");
Path filePath = directories.createFilePath(path, longName, PropertyGraphExportFormat.csv);
assertEquals("/export/8044f12c352773b7ff400ef524da6e90db419e4a.csv", filePath.toString());
}
}
| 847 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/io/KinesisConfigTest.java
|
package com.amazonaws.services.neptune.io;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.neptune.cli.AbstractTargetModule;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThrows;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class KinesisConfigTest {
private AbstractTargetModule target;
@Before
public void resetTargetModule() {
target = spy(AbstractTargetModule.class);
}
@Test
public void shouldCreateStreamIfNameAndRegionAreProvided() {
when(target.getStreamName()).thenReturn("test");
when(target.getRegion()).thenReturn("us-west-2");
KinesisConfig config = new KinesisConfig(target);
assertNotNull(config.stream());
}
@Test
public void shouldNotCreateStreamIfNameNotProvided() {
when(target.getStreamName()).thenReturn("");
when(target.getRegion()).thenReturn("us-west-2");
KinesisConfig config = new KinesisConfig(target);
Throwable t = assertThrows(IllegalArgumentException.class, () -> config.stream());
assertEquals("You must supply an AWS Region and Amazon Kinesis Data Stream name", t.getMessage());
}
@Test
public void shouldNotCreateStreamIfRegionNotProvided() {
when(target.getStreamName()).thenReturn("test");
when(target.getRegion()).thenReturn("");
KinesisConfig config = new KinesisConfig(target);
Throwable t = assertThrows(IllegalArgumentException.class, () -> config.stream());
assertEquals("You must supply an AWS Region and Amazon Kinesis Data Stream name", t.getMessage());
}
@Test
public void shouldUseProvidedCredentialsProvider() {
when(target.getStreamName()).thenReturn("test");
when(target.getRegion()).thenReturn("us-west-2");
AWSCredentialsProvider mockedCredsProvider = mock(AWSCredentialsProvider.class);
when(target.getCredentialsProvider()).thenReturn(mockedCredsProvider);
KinesisConfig config = new KinesisConfig(target);
config.stream().publish("test");
verify(mockedCredsProvider, Mockito.atLeast(1)).getCredentials();
}
}
| 848 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/EdgeLabelStrategyTest.java
|
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy.edgeLabelsOnly;
import static com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy.edgeAndVertexLabels;
import static org.junit.Assert.assertEquals;
public class EdgeLabelStrategyTest {
private final GraphTraversalSource gmodern;
private final Map<String, Object> inputMap;
private final PGEdgeResult pgEdgeResult;
private final List<String> fromLabels;
private final List<String> toLabels;
public EdgeLabelStrategyTest() {
gmodern = TinkerFactory.createModern().traversal();
inputMap = new HashMap<>();
inputMap.put("~label", "TestLabel");
inputMap.put("~from", "FromID");
inputMap.put("~to", "ToID");
fromLabels = new ArrayList<>();
toLabels = new ArrayList<>();
fromLabels.add("FromLabel");
toLabels.add("ToLabels");
inputMap.put("~fromLabels", fromLabels);
inputMap.put("~toLabels", toLabels);
pgEdgeResult = new PGEdgeResult(inputMap);
}
//Edge Labels Only
@Test
public void shouldGetEdgeLabelsFromModernGraph() {
Collection<Label> labels = edgeLabelsOnly.getLabels(gmodern);
Collection<Label> expected = new HashSet<>();
expected.add(new Label("knows"));
expected.add(new Label("created"));
assertEquals(expected, labels);
}
@Test
public void shouldGetEdgeLabelForMap() {
assertEquals(new Label("TestLabel"), edgeLabelsOnly.getLabelFor(inputMap));
}
@Test
public void shouldGetEdgeLabelForPgEdgeResult() {
assertEquals(new Label("TestLabel"), edgeLabelsOnly.getLabelFor(pgEdgeResult));
}
// Edge and Vertex Labels
@Test
public void shouldGetEdgeAndVertexLabelsFromModernGraph() {
Collection<Label> labels = edgeAndVertexLabels.getLabels(gmodern);
Collection<Label> expected = new HashSet<>();
expected.add(new Label("(person)-knows-(person)"));
expected.add(new Label("(person)-created-(software)"));
assertEquals(expected, labels);
}
@Test
public void shouldGetEdgeAndVertexLabelForMap() {
assertEquals(new Label("TestLabel", fromLabels, toLabels), edgeAndVertexLabels.getLabelFor(inputMap));
}
@Test
public void shouldGetEdgeAndVertexLabelForPgEdgeResult() {
assertEquals(new Label("TestLabel", fromLabels, toLabels), edgeAndVertexLabels.getLabelFor(pgEdgeResult));
}
}
| 849 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/ExportStatsTest.java |
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class ExportStatsTest {
@Test
public void testExportStats() throws JsonProcessingException {
ExportStats stats = new ExportStats();
ObjectNode neptuneExportNode = JsonNodeFactory.instance.objectNode();
GraphSchema schema = GraphSchema.fromJson(new ObjectMapper().readTree(
"{\"nodes\":[" +
"{\"label\":\"node1\"," +
"\"properties\":[" +
"{\"property\":\"prop1\",\"dataType\":\"String\",\"isMultiValue\":false,\"isNullable\":false,\"allTypes\":[\"String\"]}," +
"{\"property\":\"prop2\",\"dataType\":\"Double\",\"isMultiValue\":true,\"isNullable\":true,\"allTypes\":[\"Double\",\"Float\"]}]}," +
"{\"label\":\"node2\"," +
"\"properties\":[]}" +
"]," +
"\"edges\":[" +
"{\"label\":\"edge1\"," +
"\"properties\":[" +
"{\"property\":\"prop1\",\"dataType\":\"String\",\"isMultiValue\":false,\"isNullable\":false,\"allTypes\":[\"String\"]}," +
"{\"property\":\"prop2\",\"dataType\":\"Double\",\"isMultiValue\":true,\"isNullable\":true,\"allTypes\":[\"Double\",\"Float\"]}]}," +
"{\"label\":\"edge2\"," +
"\"properties\":[]}" +
"]}"
));
stats.incrementNodeStats(new Label("node1"));
stats.incrementNodeStats(new Label("node2"));
stats.incrementEdgeStats(new Label("edge1"));
stats.incrementEdgeStats(new Label("edge2"));
stats.addTo(neptuneExportNode, schema);
String formattedStats = stats.formatStats(schema);
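// Only the node/edge counters were incremented above, so the per-property details below remain at their defaults (0/-1).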
String expectedStats =
"Source:\n" +
" Nodes: 0\n" +
" Edges: 0\n" +
"Export:\n" +
" Nodes: 2\n" +
" Edges: 2\n" +
" Properties: 0\n" +
"Details:\n" +
" Nodes: \n" +
" node1: 1\n" +
" |_ prop1 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n" +
" |_ prop2 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n" +
" node2: 1\n" +
" Edges: \n" +
" edge2: 1\n" +
" edge1: 1\n" +
" |_ prop1 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n" +
" |_ prop2 {propertyCount=0, minCardinality=-1, maxCardinality=-1, recordCount=0, dataTypeCounts=[]}\n";
assertEquals(expectedStats, formattedStats);
}
}
| 850 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/RangeFactoryTest.java |
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.cluster.ConcurrencyConfig;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.util.NotImplementedException;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.junit.Test;
import java.util.Collection;
import java.util.Map;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class RangeFactoryTest {
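// AllLabels is backed by a LabelStrategy stub: these tests never resolve labels, so every strategy method simply throws.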
private static final LabelsFilter ALL_LABELS = new AllLabels(new LabelStrategy() {
@Override
public Collection<Label> getLabels(GraphTraversalSource g) {
throw new NotImplementedException();
}
@Override
public Label getLabelFor(Map<String, Object> input) {
throw new NotImplementedException();
}
@Override
public Label getLabelFor(PGResult input) {
throw new NotImplementedException();
}
@Override
public String[] additionalColumns(String... columns) {
throw new NotImplementedException();
}
@Override
public <T> GraphTraversal<? extends Element, T> addAdditionalColumns(GraphTraversal<? extends Element, T> t) {
throw new NotImplementedException();
}
});
@Test
public void shouldReturnConsecutiveRanges(){
GraphClient<?> graphClient = mock(GraphClient.class);
when(graphClient.approxCount(any(), any(), any())).thenReturn(2250L);
RangeFactory rangeFactory = RangeFactory.create(
graphClient,
ALL_LABELS,
GremlinFilters.EMPTY,
new RangeConfig(1000, 0, 2500, -1, -1),
new ConcurrencyConfig(1));
Range range1 = rangeFactory.nextRange();
assertEquals("range(0, 1000)", range1.toString());
Range range2 = rangeFactory.nextRange();
assertEquals("range(1000, 2000)", range2.toString());
Range range3 = rangeFactory.nextRange();
assertEquals("range(2000, 2500)", range3.toString());
assertFalse(range3.isEmpty());
Range range4 = rangeFactory.nextRange();
assertEquals("range(-1, -1)", range4.toString());
assertTrue(range4.isEmpty());
}
@Test
public void shouldReturnSingleRangeForAllIfRangeSizeIsMinusOne(){
GraphClient<?> graphClient = mock(GraphClient.class);
when(graphClient.approxCount(any(), any(), any())).thenReturn(2250L);
RangeFactory rangeFactory = RangeFactory.create(
graphClient,
ALL_LABELS,
GremlinFilters.EMPTY,
new RangeConfig(-1, 0, Long.MAX_VALUE, -1, -1),
new ConcurrencyConfig(1));
Range range1 = rangeFactory.nextRange();
assertEquals("range(0, -1)", range1.toString());
assertFalse(range1.isEmpty());
Range range2 = rangeFactory.nextRange();
assertEquals("range(-1, -1)", range2.toString());
assertTrue(range2.isEmpty());
}
@Test
public void shouldLeaveLastRangeOpenIfNoUpperLimit(){
GraphClient<?> graphClient = mock(GraphClient.class);
when(graphClient.approxCount(any(), any(), any())).thenReturn(2250L);
RangeFactory rangeFactory = RangeFactory.create(
graphClient,
ALL_LABELS,
GremlinFilters.EMPTY,
new RangeConfig(1000, 0, Long.MAX_VALUE, -1, -1),
new ConcurrencyConfig(1));
Range range1 = rangeFactory.nextRange();
assertEquals("range(0, 1000)", range1.toString());
Range range2 = rangeFactory.nextRange();
assertEquals("range(1000, 2000)", range2.toString());
Range range3 = rangeFactory.nextRange();
assertEquals("range(2000, -1)", range3.toString());
assertFalse(range3.isEmpty());
Range range4 = rangeFactory.nextRange();
assertEquals("range(-1, -1)", range4.toString());
assertTrue(range4.isEmpty());
}
@Test
public void shouldIndicateThatItIsExhausted(){
GraphClient<?> graphClient = mock(GraphClient.class);
when(graphClient.approxCount(any(), any(), any())).thenReturn(5000L);
RangeFactory rangeFactory = RangeFactory.create(
graphClient,
ALL_LABELS,
GremlinFilters.EMPTY,
new RangeConfig(1000, 0, 2000, -1, -1),
new ConcurrencyConfig(1));
rangeFactory.nextRange();
assertFalse(rangeFactory.isExhausted());
rangeFactory.nextRange();
assertTrue(rangeFactory.isExhausted());
}
@Test
public void shouldCalculateRangesStartingFromSkipNumber(){
GraphClient<?> graphClient = mock(GraphClient.class);
when(graphClient.approxCount(any(), any(), any())).thenReturn(30L);
RangeFactory rangeFactory = RangeFactory.create(
graphClient,
ALL_LABELS,
GremlinFilters.EMPTY,
new RangeConfig(10, 20, 10, -1, -1),
new ConcurrencyConfig(1));
Range range1 = rangeFactory.nextRange();
assertEquals("range(20, 30)", range1.toString());
assertTrue(rangeFactory.isExhausted());
}
}
| 851 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/EdgesClientTest.java |
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.GraphElementHandler;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class EdgesClientTest {
private EdgesClient client;
private GraphTraversalSource graphTraversalSource;
private ExportStats mockStats;
private FeatureToggles mockFeatures;
@Before
public void setup() {
graphTraversalSource = TinkerFactory.createModern().traversal();
mockStats = mock(ExportStats.class);
mockFeatures = mock(FeatureToggles.class);
when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);
client = new EdgesClient(graphTraversalSource, false, mockStats, mockFeatures);
}
@Test
public void testQueryForSchema() throws JsonProcessingException {
GraphSchema schema = new GraphSchema();
client.queryForSchema(
new GraphElementHandler<Map<?, Object>>() {
@Override
public void handle(Map<?, Object> properties, boolean allowTokens) throws IOException {
schema.update(GraphElementType.edges, properties, allowTokens);
}
@Override
public void close() {}
},
Range.ALL, new AllLabels(EdgeLabelStrategy.edgeLabelsOnly), GremlinFilters.EMPTY);
JsonNode expectedSchema = new ObjectMapper().readTree(
"{\n" +
" \"edges\" : [ {\n" +
" \"label\" : \"knows\",\n" +
" \"properties\" : [ {\n" +
" \"property\" : \"weight\",\n" +
" \"dataType\" : \"Double\",\n" +
" \"isMultiValue\" : false,\n" +
" \"isNullable\" : false,\n" +
" \"allTypes\" : [ \"Double\" ]\n" +
" } ]\n" +
" }, {\n" +
" \"label\" : \"created\",\n" +
" \"properties\" : [ {\n" +
" \"property\" : \"weight\",\n" +
" \"dataType\" : \"Double\",\n" +
" \"isMultiValue\" : false,\n" +
" \"isNullable\" : false,\n" +
" \"allTypes\" : [ \"Double\" ]\n" +
" } ]\n" +
" } ]\n" +
"}"
);
assertEquals(expectedSchema, schema.toJson(false));
}
@Test
public void testQueryForValues() {
List<String> ids = new ArrayList<>();
GraphElementHandler<PGResult> handler = new GraphElementHandler<PGResult>() {
@Override
public void handle(PGResult element, boolean allowTokens) throws IOException {
ids.add(element.getId());
assertFalse(allowTokens);
}
@Override
public void close() throws Exception {}
};
client.queryForValues(handler, Range.ALL, new AllLabels(EdgeLabelStrategy.edgeLabelsOnly),
GremlinFilters.EMPTY, new GraphElementSchemas());
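// Edge ids 7-12 are the six edges of the TinkerPop "modern" toy graph.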
assertEquals(Arrays.asList("7","8","9","10","11","12"), ids);
}
}
| 852 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/NeptuneGremlinClientTest.java |
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.auth.HandshakeRequestConfig;
import com.amazonaws.services.neptune.cluster.ConcurrencyConfig;
import com.amazonaws.services.neptune.cluster.ConnectionConfig;
import com.amazonaws.services.neptune.propertygraph.io.SerializationConfig;
import org.apache.tinkerpop.gremlin.driver.Cluster;
import org.apache.tinkerpop.gremlin.driver.HandshakeInterceptor;
import org.apache.tinkerpop.gremlin.driver.LBAwareSigV4WebSocketChannelizer;
import org.apache.tinkerpop.gremlin.driver.ser.Serializers;
import org.junit.Test;
import org.apache.tinkerpop.gremlin.driver.Client;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class NeptuneGremlinClientTest {
private final SerializationConfig defaultSerializationConfig = new SerializationConfig(
Serializers.GRAPHBINARY_V1D0.name(), 50000000, NeptuneGremlinClient.DEFAULT_BATCH_SIZE, false);
@Test
public void testQueryClientSubmit() {
Client mockedClient = mock(Client.class);
NeptuneGremlinClient.QueryClient qc = new NeptuneGremlinClient.QueryClient(mockedClient);
qc.submit("test", null);
verify(mockedClient).submit("test");
}
@Test
public void testConnectionConfigPassthrough() {
com.amazonaws.services.neptune.cluster.Cluster mockedCluster = mock(com.amazonaws.services.neptune.cluster.Cluster.class);
Collection<String> endpoints = new HashSet<>();
endpoints.add("localhost");
//With SSL Enabled
when(mockedCluster.connectionConfig()).thenReturn(new ConnectionConfig(
null, endpoints, 1234, false, true, null));
when(mockedCluster.concurrencyConfig()).thenReturn(new ConcurrencyConfig(1));
NeptuneGremlinClient client = NeptuneGremlinClient.create(mockedCluster, defaultSerializationConfig);
Cluster cluster = getClusterFromClient(client);
cluster.init();
assertEquals(1234, cluster.getPort());
assertEquals("wss://localhost:1234/gremlin", cluster.allHosts().iterator().next().getHostUri().toString());
assertEquals(true, cluster.isSslEnabled());
//With SSL Disabled
when(mockedCluster.connectionConfig()).thenReturn(new ConnectionConfig(
null, endpoints, 1234, false, false, null));
client = NeptuneGremlinClient.create(mockedCluster, defaultSerializationConfig);
cluster = getClusterFromClient(client);
cluster.init();
assertEquals("ws://localhost:1234/gremlin", cluster.allHosts().iterator().next().getHostUri().toString());
assertEquals(false, cluster.isSslEnabled());
}
@Test
public void shouldUseHandshakeInterceptorForSigningDirectConnections() {
ConnectionConfig mockedConfig = mock(ConnectionConfig.class);
when(mockedConfig.isDirectConnection()).thenReturn(true);
Cluster.Builder builder = Cluster.build();
builder = NeptuneGremlinClient.configureIamSigning(builder, mockedConfig);
Cluster cluster = builder.create();
HandshakeInterceptor interceptor;
try {
Method getHandshakeInterceptor = cluster.getClass().getDeclaredMethod("getHandshakeInterceptor");
getHandshakeInterceptor.setAccessible(true);
interceptor = (HandshakeInterceptor) getHandshakeInterceptor.invoke(cluster);
getHandshakeInterceptor.setAccessible(false);
} catch (Exception e) {
throw new RuntimeException(e);
}
assertNotNull(interceptor);
assertNotEquals(interceptor, HandshakeInterceptor.NO_OP);
}
@Test
public void shouldUseLBAwareChannelizerForSigningProxyConnections() {
ConnectionConfig mockedConfig = mock(ConnectionConfig.class);
when(mockedConfig.isDirectConnection()).thenReturn(false);
when(mockedConfig.handshakeRequestConfig()).thenReturn(mock(HandshakeRequestConfig.class));
Cluster.Builder builder = Cluster.build();
builder = NeptuneGremlinClient.configureIamSigning(builder, mockedConfig);
assertEquals(LBAwareSigV4WebSocketChannelizer.class.getName(), builder.create().getChannelizer());
}
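// The Cluster is held in a private field of NeptuneGremlinClient, so tests read it via reflection.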
private static Cluster getClusterFromClient(NeptuneGremlinClient client) {
try {
Field clusterField = client.getClass().getDeclaredField("cluster");
clusterField.setAccessible(true);
Cluster cluster = (Cluster) clusterField.get(client);
clusterField.setAccessible(false);
return cluster;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| 853 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/LabelTest.java |
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import java.util.Arrays;
import static org.junit.Assert.*;
public class LabelTest {
@Test
public void shouldParseSimpleNodeLabelFromJson() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : \"label1\"\n" +
"}";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("label1", label.fullyQualifiedLabel());
}
@Test
public void shouldParseComplexNodeLabelFromSingleSemiColonSeparatedStringValue() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : \"labelB;labelA\"\n" +
"}";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("labelA;labelB", label.fullyQualifiedLabel());
}
@Test
public void shouldParseComplexNodeLabelFromArray() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : [ \"labelB\", \"labelA\" ]\n" +
"}";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("labelA;labelB", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelFromJson() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\",\n" +
" \"~fromLabels\" : [ \"startLabel\" ],\n" +
" \"~toLabels\" : [ \"endLabel\" ]\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("(startLabel)-edgeLabel-(endLabel)", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelWithMultiLabelStartAndEndVerticesFromJson() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\",\n" +
" \"~fromLabels\" : [ \"startLabel2\", \"startLabel1\" ],\n" +
" \"~toLabels\" : [ \"endLabel2\", \"endLabel1\", \"endLabel3\" ]\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("(startLabel1;startLabel2)-edgeLabel-(endLabel1;endLabel2;endLabel3)", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelFromJsonWithSimpleStringStartAndEndVertexLabels() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\",\n" +
" \"~fromLabels\" : \"startLabel\",\n" +
" \"~toLabels\" : \"endLabel\"\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("(startLabel)-edgeLabel-(endLabel)", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelFromJsonWithSemicolonSeparatedStringStartAndEndVertexLabels() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\",\n" +
" \"~fromLabels\" : \"startLabel2;startLabel1\",\n" +
" \"~toLabels\" : \"endLabel2;endLabel1;endLabel3\"\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("(startLabel1;startLabel2)-edgeLabel-(endLabel1;endLabel2;endLabel3)", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelFromJsonWithMissingStartVertexLabel() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\",\n" +
" \"~toLabels\" : \"endLabel\"\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("(_)-edgeLabel-(endLabel)", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelFromJsonWithMissingEndVertexLabel() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\",\n" +
" \"~fromLabels\" : [ \"startLabel\" ]\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("(startLabel)-edgeLabel-(_)", label.fullyQualifiedLabel());
}
@Test
public void shouldParseEdgeLabelFromJsonWithMissingStartAndEndVertexLabels() throws JsonProcessingException {
String json = "{\n" +
" \"label\" : {\n" +
" \"~label\" : \"edgeLabel\"\n" +
" }\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Label label = Label.fromJson(jsonNode.path("label"));
assertEquals("edgeLabel", label.fullyQualifiedLabel());
}
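// The tests below exercise isAssignableFrom: a label is assignable from another label carrying the same or more start/end vertex label detail.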
@Test
public void twoSimpleLabelsCanBeAssignedFromEachOther(){
Label l1 = new Label("my-label");
Label l2 = new Label("my-label");
assertTrue(l1.isAssignableFrom(l2));
assertTrue(l2.isAssignableFrom(l1));
}
@Test
public void twoEquivalentComplexLabelsCanBeAssignedFromEachOther(){
Label l1 = new Label("my-label", "startLabel1;startLabel2", "endLabel1;endLabel2");
Label l2 = new Label("my-label", "startLabel1;startLabel2", "endLabel1;endLabel2");
Label l3 = new Label("my-label", "startLabel2;startLabel1", "endLabel2;endLabel1");
assertTrue(l1.isAssignableFrom(l2));
assertTrue(l1.isAssignableFrom(l3));
assertTrue(l2.isAssignableFrom(l1));
assertTrue(l2.isAssignableFrom(l3));
assertTrue(l3.isAssignableFrom(l1));
assertTrue(l3.isAssignableFrom(l2));
}
@Test
public void simpleLabelCanBeAssignedFromComplexLabelButComplexLabelCannotBeAssignedFromSimpleLabel(){
Label l1 = new Label("my-label");
Label l2 = new Label("my-label", "startLabel", "endLabel");
assertTrue(l1.isAssignableFrom(l2));
assertFalse(l2.isAssignableFrom(l1));
}
@Test
public void complexLabelComprisingSubsetOfAnotherComplexLabelCanBeAssignedFromLatter(){
Label l1 = new Label("my-label", "startLabel1", "endLabel1");
Label l2 = new Label("my-label", "startLabel1", "");
Label l3 = new Label("my-label", Arrays.asList("startLabel2", "startLabel1"), Arrays.asList("endLabel2", "endLabel1"));
assertTrue(l1.isAssignableFrom(l3));
assertTrue(l2.isAssignableFrom(l3));
assertTrue(l2.isAssignableFrom(l1));
assertFalse(l3.isAssignableFrom(l1));
assertFalse(l3.isAssignableFrom(l2));
assertFalse(l1.isAssignableFrom(l2));
}
@Test
public void complexLabelsThatOnlyOverlapCannotBeAssignedFromEachOther(){
Label l1 = new Label("my-label", "startLabel1, startLabel2", "endLabel1, endLabel2");
Label l2 = new Label("my-label", "startLabel2, startLabel3", "endLabel2, endLabel3");
assertFalse(l1.isAssignableFrom(l2));
assertFalse(l2.isAssignableFrom(l1));
}
}
| 854 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/NodesClientTest.java |
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.GraphElementHandler;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NodesClientTest {
private NodesClient client;
private GraphTraversalSource graphTraversalSource;
private ExportStats mockStats;
private FeatureToggles mockFeatures;
@Before
public void setup() {
graphTraversalSource = TinkerFactory.createModern().traversal();
mockStats = mock(ExportStats.class);
mockFeatures = mock(FeatureToggles.class);
when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);
client = new NodesClient(graphTraversalSource, false, mockStats, mockFeatures);
}
@Test
public void testQueryForSchema() throws JsonProcessingException {
GraphSchema schema = new GraphSchema();
client.queryForSchema(
new GraphElementHandler<Map<?, Object>>() {
@Override
public void handle(Map<?, Object> properties, boolean allowTokens) throws IOException {
schema.update(GraphElementType.nodes, properties, allowTokens);
}
@Override
public void close() {}
},
Range.ALL, new AllLabels(NodeLabelStrategy.nodeLabelsOnly), GremlinFilters.EMPTY);
JsonNode expectedSchema = new ObjectMapper().readTree(
"{\n" +
" \"nodes\" : [ {\n" +
" \"label\" : \"software\",\n" +
" \"properties\" : [ {\n" +
" \"property\" : \"name\",\n" +
" \"dataType\" : \"String\",\n" +
" \"isMultiValue\" : false,\n" +
" \"isNullable\" : false,\n" +
" \"allTypes\" : [ \"String\" ]\n" +
" }, {\n" +
" \"property\" : \"lang\",\n" +
" \"dataType\" : \"String\",\n" +
" \"isMultiValue\" : false,\n" +
" \"isNullable\" : false,\n" +
" \"allTypes\" : [ \"String\" ]\n" +
" } ]\n" +
" }, {\n" +
" \"label\" : \"person\",\n" +
" \"properties\" : [ {\n" +
" \"property\" : \"name\",\n" +
" \"dataType\" : \"String\",\n" +
" \"isMultiValue\" : false,\n" +
" \"isNullable\" : false,\n" +
" \"allTypes\" : [ \"String\" ]\n" +
" }, {\n" +
" \"property\" : \"age\",\n" +
" \"dataType\" : \"Integer\",\n" +
" \"isMultiValue\" : false,\n" +
" \"isNullable\" : false,\n" +
" \"allTypes\" : [ \"Integer\" ]\n" +
" } ]\n" +
" } ]\n" +
"}"
);
assertEquals(expectedSchema, schema.toJson(false));
}
@Test
public void testQueryForValues() {
List<String> ids = new ArrayList<>();
GraphElementHandler<PGResult> handler = new GraphElementHandler<PGResult>() {
@Override
public void handle(PGResult element, boolean allowTokens) throws IOException {
ids.add(element.getId());
assertFalse(allowTokens);
}
@Override
public void close() throws Exception {}
};
client.queryForValues(handler, Range.ALL, new AllLabels(NodeLabelStrategy.nodeLabelsOnly),
GremlinFilters.EMPTY, new GraphElementSchemas());
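// Vertex ids 1-6 are the six vertices of the TinkerPop "modern" toy graph.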
assertEquals(Arrays.asList("1","2","3","4","5","6"), ids);
}
}
| 855 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/NodeLabelStrategyTest.java |
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.propertygraph.io.result.ExportPGNodeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy.nodeLabelsOnly;
import static org.junit.Assert.assertEquals;
public class NodeLabelStrategyTest {
private final GraphTraversalSource gmodern;
private final Map<String, Object> inputMap;
private final PGResult pgNodeResult;
private final List<String> labels;
public NodeLabelStrategyTest() {
gmodern = TinkerFactory.createModern().traversal();
labels = new ArrayList<>();
labels.add("TestLabel");
inputMap = new HashMap<>();
inputMap.put("~label", labels);
pgNodeResult = new ExportPGNodeResult(inputMap);
}
//Node Labels Only
@Test
public void shouldGetLabelsFromModernGraph() {
Collection<Label> labels = nodeLabelsOnly.getLabels(gmodern);
Collection<Label> expected = new HashSet<>();
expected.add(new Label("person"));
expected.add(new Label("software"));
assertEquals(expected, labels);
}
@Test
public void shouldGetLabelForMap() {
assertEquals(new Label(labels), nodeLabelsOnly.getLabelFor(inputMap));
}
@Test
public void shouldGetLabelForPgNodeResult() {
assertEquals(new Label(labels), nodeLabelsOnly.getLabelFor(pgNodeResult));
}
}
| 856 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/SpecifiedLabelsTest.java |
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.result.ExportPGNodeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
public class SpecifiedLabelsTest {
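// Each test builds a traversal against an empty graph and asserts on its string form via GremlinQueryDebugger.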
@Test
public void shouldCreateLabelFilterForSimpleSingleNodeLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("label1")),
NodeLabelStrategy.nodeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.V().hasLabel(\"label1\")",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterForComplexSingleNodeLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("label1;label2")),
NodeLabelStrategy.nodeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.V().hasLabel(\"label1\").hasLabel(\"label2\")",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterWithOrForMultipleSimpleNodeLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Arrays.asList(new Label("label1"), new Label("label2")),
NodeLabelStrategy.nodeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.V().or(__.hasLabel(\"label1\"),__.hasLabel(\"label2\"))",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterWithOrForMultipleComplexNodeLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Arrays.asList(new Label("label1;labelA"), new Label("label2;labelB")),
NodeLabelStrategy.nodeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.V().or(__.hasLabel(\"label1\").hasLabel(\"labelA\"),__.hasLabel(\"label2\").hasLabel(\"labelB\"))",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterForSimpleEdgeLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("edgeLabel1", "startLabel", "endLabel")),
EdgeLabelStrategy.edgeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
assertEquals("__.E().hasLabel(\"edgeLabel1\")",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterForComplexEdgeLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("edgeLabel1", "startLabel", "endLabel")),
EdgeLabelStrategy.edgeAndVertexLabels);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.and(__.outV().hasLabel(\"startLabel\"),__.inV().hasLabel(\"endLabel\")))",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterForComplexEdgeLabelWithComplexVertexLabels() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("edgeLabel1", "startLabel1;startLabel2", "endLabel1;endLabel2")),
EdgeLabelStrategy.edgeAndVertexLabels);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.and(__.outV().hasLabel(\"startLabel1\").hasLabel(\"startLabel2\"),__.inV().hasLabel(\"endLabel1\").hasLabel(\"endLabel2\")))",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterForComplexEdgeLabelWithOnlyStartVertexLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("edgeLabel1", "startLabel", "")),
EdgeLabelStrategy.edgeAndVertexLabels);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.outV().hasLabel(\"startLabel\"))",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldCreateLabelFilterForComplexEdgeLabelWithOnlyEndVertexLabel() {
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Collections.singletonList(new Label("edgeLabel1", "", "endLabel")),
EdgeLabelStrategy.edgeAndVertexLabels);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
specifiedLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.edges);
assertEquals("__.E().hasLabel(\"edgeLabel1\").where(__.inV().hasLabel(\"endLabel\"))",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void simpleEdgeLabelsShouldProvideIntersectionWithComplexEdgeLabels() {
SpecifiedLabels specifiedSimpleEdgeLabels = new SpecifiedLabels(
Arrays.asList(new Label("edgeLabel1"), new Label("edgeLabel2"), new Label("edgeLabel3")),
EdgeLabelStrategy.edgeAndVertexLabels);
List<Label> complexEdgeLabels = Arrays.asList(
new Label("edgeLabel2", "fromLabel2", "toLabel2"),
new Label("edgeLabel4", "fromLabel4", "toLabel4"));
LabelsFilter newFilter = specifiedSimpleEdgeLabels.intersection(complexEdgeLabels);
assertFalse(newFilter.isEmpty());
assertEquals("edges with label(s) '(fromLabel2)-edgeLabel2-(toLabel2)'", newFilter.description("edges"));
}
@Test
public void complexEdgeLabelsShouldProvideEmptyIntersectionWithSimpleEdgeLabels() {
SpecifiedLabels specifiedComplexEdgeLabels = new SpecifiedLabels(
Arrays.asList(new Label("edgeLabel1", "fromLabel1", "toLabel1"),
new Label("edgeLabel2", "fromLabel2", "toLabel2"),
new Label("edgeLabel3", "fromLabel3", "toLabel3")),
EdgeLabelStrategy.edgeAndVertexLabels);
List<Label> simpleEdgeLabels = Arrays.asList(
new Label("edgeLabel2"),
new Label("edgeLabel4"));
LabelsFilter newFilter = specifiedComplexEdgeLabels.intersection(simpleEdgeLabels);
assertTrue(newFilter.isEmpty());
assertEquals("edges with zero labels", newFilter.description("edges"));
}
@Test
public void shouldGetSpecifiedLabelForPGResult() {
LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Arrays.asList(new Label("label1"), new Label("label2")), labelStrategy);
Map<String, Object> input = new HashMap<>();
List<String> labels = Collections.singletonList("label1");
input.put("~label", labels);
PGResult pgResult = new ExportPGNodeResult(input);
Label label = specifiedLabels.getLabelFor(pgResult);
assertEquals(new Label(labels), label);
}
@Test
public void shouldGetSpecifiedLabelForInputMap() {
LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
SpecifiedLabels specifiedLabels = new SpecifiedLabels(
Arrays.asList(new Label("label1"), new Label("label2")), labelStrategy);
Map<String, Object> input = new HashMap<>();
List<String> labels = Collections.singletonList("label1");
input.put("~label", labels);
Label label = specifiedLabels.getLabelFor(input);
assertEquals(new Label(labels), label);
}
}
| 857 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/RangeTest.java |
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class RangeTest {
@Test
public void shouldIndicateThatRangeCoversAll(){
assertTrue(new Range(0, -1).isAll());
assertFalse(new Range(0, 1).isAll());
assertFalse(new Range(-1, -1).isAll());
}
@Test
public void shouldIndicateIfEmpty(){
assertTrue(new Range(-1, -1).isEmpty());
assertFalse(new Range(0, 1).isEmpty());
assertFalse(new Range(0, -1).isEmpty());
}
@Test
public void shouldIndicateIfSizeBiggerThanSuppliedValue(){
assertTrue(new Range(0, -1).sizeExceeds(100));
assertTrue(new Range(0, 200).sizeExceeds(100));
assertFalse(new Range(0, 100).sizeExceeds(100));
assertFalse(new Range(0, 100).sizeExceeds(200));
assertFalse(new Range(-1, -1).sizeExceeds(1));
}
}
| 858 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/AllLabelsTest.java |
package com.amazonaws.services.neptune.propertygraph;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.io.result.ExportPGNodeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
public class AllLabelsTest {
@Test
public void shouldGetLabelForPGResult() {
LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
AllLabels allLabels = new AllLabels(labelStrategy);
Map<String, Object> input = new HashMap<>();
List<String> labels = new ArrayList<>();
labels.add("TestLabel");
input.put("~label", labels);
PGResult pgResult = new ExportPGNodeResult(input);
Label label = allLabels.getLabelFor(pgResult);
assertEquals(new Label(labels), label);
}
@Test
public void shouldGetLabelForInputMap() {
LabelStrategy labelStrategy = NodeLabelStrategy.nodeLabelsOnly;
AllLabels allLabels = new AllLabels(labelStrategy);
Map<String, Object> input = new HashMap<>();
List<String> labels = new ArrayList<>();
labels.add("TestLabel");
input.put("~label", labels);
Label label = allLabels.getLabelFor(input);
assertEquals(new Label(labels), label);
}
@Test
public void shouldNotAddAnyLabelFiltersWhenApplied() {
AllLabels allLabels = new AllLabels(NodeLabelStrategy.nodeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
allLabels.apply(g.V(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.V()",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void shouldNotAddAnyLabelFiltersWhenAppliedForEdges() {
AllLabels allLabels = new AllLabels(EdgeLabelStrategy.edgeLabelsOnly);
AnonymousTraversalSource<GraphTraversalSource> traversalSource = AnonymousTraversalSource.traversal();
GraphTraversalSource g = traversalSource.withGraph(EmptyGraph.instance());
GraphTraversal<? extends Element, ?> traversal =
allLabels.apply(g.E(), new FeatureToggles(Collections.emptyList()), GraphElementType.nodes);
assertEquals("__.E()",
GremlinQueryDebugger.queryAsString(traversal));
}
@Test
public void getPropertiesForLabelsTest() {
AllLabels allLabels = new AllLabels(NodeLabelStrategy.nodeLabelsOnly);
GraphElementSchemas graphElementSchemas = new GraphElementSchemas();
Label label = new Label("test");
Map<String, Object> properties = new HashMap<>();
properties.put("Test Prop int", 1);
properties.put("Test Prop String", "String");
properties.put("Test Prop Array", new int[]{1, 2});
graphElementSchemas.update(label, properties, false);
String[] propertyLabels = allLabels.getPropertiesForLabels(graphElementSchemas);
assertArrayEquals(new String[]{"Test Prop String", "Test Prop Array", "Test Prop int"}, propertyLabels);
}
}
| 859 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/CsvPropertyGraphPrinterTest.java |
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.PrintOutputWriter;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.junit.Test;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.*;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
public class CsvPropertyGraphPrinterTest {
@Test
public void shouldUseSeparatorToSeparateMultipleValues() throws Exception {
String separator = "|";
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("property1", propertySchema1);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("property1", Arrays.asList("X", "Y"));
}};
CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
new PrintOutputWriter("outputId", stringWriter),
labelSchema,
new PrinterOptions(CsvPrinterOptions.builder().setMultiValueSeparator(separator).build()));
printer.printProperties(props);
assertEquals(
"\"X|Y\"",
stringWriter.toString());
}
@Test
public void shouldEscapeSeparatorValuesInMultipleValues() throws Exception {
String separator = "|";
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("property1", propertySchema1);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("property1", Arrays.asList("A|B", "Y"));
}};
CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
new PrintOutputWriter("outputId", stringWriter),
labelSchema,
new PrinterOptions(CsvPrinterOptions.builder().setMultiValueSeparator(separator).build()));
printer.printProperties(props);
assertEquals(
"\"A\\|B|Y\"",
stringWriter.toString());
}
@Test
public void shouldUseEmptySeparatorToSeparateMultipleValues() throws Exception {
String separator = "";
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("property1", propertySchema1);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("property1", Arrays.asList("X;B", "Y"));
}};
CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
new PrintOutputWriter("outputId", stringWriter),
labelSchema,
new PrinterOptions(CsvPrinterOptions.builder().setMultiValueSeparator(separator).build()));
printer.printProperties(props);
assertEquals(
"\"X;BY\"",
stringWriter.toString());
}
@Test
public void shouldEscapeTwoDoubleQuoteAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("{\"hobby\" : \"watching \"Flash\"\"}",
"\"{\"\"hobby\"\" : \"\"watching \"\"Flash\"\"\"\"}\"",
new PrinterOptions(CsvPrinterOptions.builder().build()));
}
@Test
public void shouldEscapeThreeDoubleQuoteAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("{\"hobby\" : \"watching \"The \"Flash\"\"\"}",
"\"{\"\"hobby\"\" : \"\"watching \"\"The \"\"Flash\"\"\"\"\"\"}\"",
new PrinterOptions(CsvPrinterOptions.builder().build()));
}
@Test
public void shouldPrintCommaInStringWhenPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("{\"hobby\", \"watching \"The \"Flash\"\"}",
"\"{\"\"hobby\"\", \"\"watching \"\"The \"\"Flash\"\"\"\"}\"",
new PrinterOptions(CsvPrinterOptions.builder().build()));
}
@Test
public void shouldNotEscapeNewlineCharAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("A\nB", "\"A\nB\"",
new PrinterOptions(CsvPrinterOptions.builder().build()));
}
@Test
public void shouldNotEscapeNewlineAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("A" + System.lineSeparator() + "B", "\"A\nB\"",
new PrinterOptions(CsvPrinterOptions.builder().build()));
}
@Test
public void shouldEscapeNewlineCharSetTrueAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("A\nB",
"\"A\\nB\"",
new PrinterOptions(CsvPrinterOptions.builder().setEscapeNewline(true).build()));
}
@Test
public void shouldEscapeNewlineSetTrueAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("A" + System.lineSeparator() + "B",
"\"A\\nB\"",
new PrinterOptions(CsvPrinterOptions.builder().setEscapeNewline(true).build()));
}
@Test
public void shouldNotEscapeNewlineCharsAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("A\n\nB", "\"A\n\nB\"",
new PrinterOptions(CsvPrinterOptions.builder().build()));
}
@Test
public void shouldEscapeNewlineCharsSetTrueAfterPrintPropertiesToCSVAndRewrite() throws Exception {
testEscapeCharacterAfterPrintPropertiesAndRewrite("A\n\nB",
"\"A\\n\\nB\"",
new PrinterOptions(CsvPrinterOptions.builder().setEscapeNewline(true).build()));
}
// Helper for the escaping tests above: verifies that when CsvPropertyGraphPrinter prints a property string to
// a buffer, reading the buffer back with CSVFormat (the same library used by RewriteCSV) yields the original value.
private void testEscapeCharacterAfterPrintPropertiesAndRewrite(String originalValue, String expectedValue, PrinterOptions printerOptions) throws IOException {
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, false, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("property1", propertySchema1);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("property1", Collections.singletonList(originalValue));
}};
CsvPropertyGraphPrinter printer = new CsvPropertyGraphPrinter(
new PrintOutputWriter("outputId", stringWriter),
labelSchema,
printerOptions);
printer.printProperties(props);
// all double quotes should be escaped when printer prints
assertEquals(expectedValue, stringWriter.toString());
// using CSVFormat to read in printed items (same library used by RewriteCSV)
String[] filePropertyHeaders = labelSchema.propertySchemas().stream()
.map(p -> p.property().toString())
.collect(Collectors.toList())
.toArray(new String[]{});
CSVFormat format = CSVFormat.RFC4180.builder().setHeader(filePropertyHeaders).build();
Reader in = new StringReader(stringWriter.toString());
Iterable<CSVRecord> records = format.parse(in);
for (CSVRecord record : records) {
// what CSVFormat read in from printed CSV should be the original value
if (printerOptions.csv().escapeNewline()){
// parsed record will contain escaped newline, to compare to original we have to unescape it
assertEquals(originalValue, record.get("property1").replace("\\n", "\n"));
} else {
assertEquals(originalValue, record.get("property1"));
}
// double quotes should all be properly escaped again when we format for rewrite
assertEquals(expectedValue, DataType.String.format(record.get("property1"), printerOptions.csv().escapeNewline()));
}
}
}
| 860 | 0 | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph | Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/JsonPropertyGraphPrinterTest.java |
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.PrintOutputWriter;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
import java.util.*;
import static com.amazonaws.services.neptune.util.MapUtils.entry;
import static com.amazonaws.services.neptune.util.MapUtils.map;
import static org.junit.Assert.*;
public class JsonPropertyGraphPrinterTest {
@Test
public void shouldPrintEdge() throws Exception {
StringWriter stringWriter = new StringWriter();
try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(
new PrintOutputWriter("test", stringWriter),
new LabelSchema(new Label("my-label")),
PrinterOptions.NULL_OPTIONS)) {
propertyGraphPrinter.printStartRow();
propertyGraphPrinter.printEdge("edge-id", "edge-label", "from-id", "to-id");
propertyGraphPrinter.printEndRow();
}
assertEquals(
"{\"~id\":\"edge-id\",\"~label\":\"edge-label\",\"~from\":\"from-id\",\"~to\":\"to-id\"}",
stringWriter.toString());
}
@Test
public void shouldPrintEmptyListAsListIrrespectiveOfWhetherMultiValueIsTrue() throws Exception {
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
PropertySchema propertySchema2 = new PropertySchema("property2", false, DataType.String, false, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("property1", propertySchema1);
labelSchema.put("property2", propertySchema2);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("property1", new ArrayList<>());
put("property2", new ArrayList<>());
}};
try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
propertyGraphPrinter.printStartRow();
propertyGraphPrinter.printProperties(props);
propertyGraphPrinter.printEndRow();
}
assertEquals(
"{\"property1\":[],\"property2\":[]}",
stringWriter.toString());
}
@Test
public void shouldPrintSingleValueListAsSingleValueWhenIsMultiValueIsFalse() throws Exception {
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema = new PropertySchema("tags", false, DataType.String, false, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("tags", propertySchema);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("tags", Collections.singletonList("tag1"));
}};
try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
propertyGraphPrinter.printStartRow();
propertyGraphPrinter.printProperties(props);
propertyGraphPrinter.printEndRow();
}
assertEquals(
"{\"tags\":\"tag1\"}",
stringWriter.toString());
}
@Test
public void shouldPrintSingleValueListAsSingleValueWhenIsMultiValueIsFalseButStrictCardinalityIsEnforced() throws Exception {
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema = new PropertySchema("tags", false, DataType.String, false, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("tags", propertySchema);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("tags", Collections.singletonList("tag1"));
}};
PrinterOptions printerOptions = new PrinterOptions(JsonPrinterOptions.builder().setStrictCardinality(true).build());
try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, printerOptions)) {
propertyGraphPrinter.printStartRow();
propertyGraphPrinter.printProperties(props);
propertyGraphPrinter.printEndRow();
}
assertEquals(
"{\"tags\":[\"tag1\"]}",
stringWriter.toString());
}
@Test
public void shouldPrintSingleValueListAsArrayWhenIsMultiValueIsTrue() throws Exception {
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema = new PropertySchema("tags", false, DataType.String, true, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("tags", propertySchema);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("tags", Collections.singletonList("tag1"));
}};
try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
propertyGraphPrinter.printStartRow();
propertyGraphPrinter.printProperties(props);
propertyGraphPrinter.printEndRow();
}
assertEquals(
"{\"tags\":[\"tag1\"]}",
stringWriter.toString());
}
@Test
public void shouldPrintMultiValueListAsArrayIrrespectiveOfWhetherMultiValueIsTrue() throws Exception {
StringWriter stringWriter = new StringWriter();
PropertySchema propertySchema1 = new PropertySchema("property1", false, DataType.String, true, EnumSet.noneOf(DataType.class));
PropertySchema propertySchema2 = new PropertySchema("property2", false, DataType.String, false, EnumSet.noneOf(DataType.class));
LabelSchema labelSchema = new LabelSchema(new Label("Entity"));
labelSchema.put("property1", propertySchema1);
labelSchema.put("property2", propertySchema2);
HashMap<String, List<String>> props = new HashMap<String, List<String>>() {{
put("property1", Arrays.asList("tag1", "tag2"));
put("property2", Arrays.asList("tag1", "tag2"));
}};
try (PropertyGraphPrinter propertyGraphPrinter = PropertyGraphExportFormat.json.createPrinter(new PrintOutputWriter("outputId", stringWriter), labelSchema, PrinterOptions.NULL_OPTIONS)) {
propertyGraphPrinter.printStartRow();
propertyGraphPrinter.printProperties(props);
propertyGraphPrinter.printEndRow();
}
assertEquals(
"{\"property1\":[\"tag1\",\"tag2\"],\"property2\":[\"tag1\",\"tag2\"]}",
stringWriter.toString());
}
@Test
public void appendsPreviouslyUnseenValuesToObjectWhenInferringSchema() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
new PrintOutputWriter("test", stringWriter),
labelSchema,
PrinterOptions.NULL_OPTIONS);
print(printer,
map(entry("fname", "fname1")),
map(entry("fname", "fname2"), entry("lname", "lname2")),
map(entry("fname", "fname3"), entry("age", 30)),
map(entry("lname", "lname4"), entry("age", 40)),
map(entry("fname", "fname5"), entry("lname", "lname5"), entry("age", 50))
);
String expectedOutput = "{\"fname\":\"fname1\"}\n" +
"{\"fname\":\"fname2\",\"lname\":\"lname2\"}\n" +
"{\"fname\":\"fname3\",\"age\":30}\n" +
"{\"lname\":\"lname4\",\"age\":40}\n" +
"{\"fname\":\"fname5\",\"lname\":\"lname5\",\"age\":50}";
assertEquals(expectedOutput, stringWriter.toString());
}
@Test
public void updatesDataTypesInSchemaForColumnsWithEachNewRowWhenInferringSchema() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
new PrintOutputWriter("test", stringWriter),
labelSchema,
PrinterOptions.NULL_OPTIONS);
print(printer,
map(entry("age", 10)),
map(entry("age", "ten"), entry("height", 5)),
map(entry("age", 11), entry("height", 5.2))
);
assertEquals(2, labelSchema.propertyCount());
assertEquals(DataType.String, labelSchema.getPropertySchema("age").dataType());
}
@Test
@Ignore
public void keepsOriginalDatatypesForPropertyValuesWhenWritingProperties() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
new PrintOutputWriter("test", stringWriter),
labelSchema,
PrinterOptions.NULL_OPTIONS);
print(printer,
map(entry("age", 10)),
map(entry("age", "ten"), entry("height", 5)),
map(entry("age", 11), entry("height", 5.2))
);
String expectedOutput = "{\"age\":10}\n" +
"{\"age\":\"ten\",\"height\":5}\n" +
"{\"age\":11,\"height\":5.2}";
assertEquals(expectedOutput, stringWriter.toString());
}
@Test
public void columnsThatDoNotAppearInFirstRowAreNullable() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
new PrintOutputWriter("test", stringWriter),
labelSchema,
PrinterOptions.NULL_OPTIONS);
print(printer,
map(entry("p-1", 10), entry("p-2", 20)),
map(entry("p-1", 30), entry("p-2", 40), entry("p-3", 50)),
map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
);
assertFalse(labelSchema.getPropertySchema("p-1").isNullable());
assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
}
@Test
public void columnsThatAppearInFirstRowButNotSubsequentRowsAreNullable() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
PropertyGraphPrinter printer = PropertyGraphExportFormat.json.createPrinterForInferredSchema(
new PrintOutputWriter("test", stringWriter),
labelSchema,
PrinterOptions.NULL_OPTIONS);
print(printer,
map(entry("p-1", 10), entry("p-2", 20)),
map(entry("p-2", 40), entry("p-3", 50)),
map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
);
assertTrue(labelSchema.getPropertySchema("p-1").isNullable());
assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
}
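// Helper: for each row, prints a start row, the row's properties, and an end row with the supplied printer.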
private void print(PropertyGraphPrinter printer, Map<?, ?>... rows) throws IOException {
for (Map<?, ?> row : rows) {
printer.printStartRow();
printer.printProperties(row);
printer.printEndRow();
}
}
}
| 861 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/QueryTaskTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.Status;
import com.amazonaws.services.neptune.io.StatusOutputFormat;
import com.amazonaws.services.neptune.propertygraph.AllLabels;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.NamedQuery;
import com.amazonaws.services.neptune.propertygraph.NeptuneGremlinClient;
import com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.schema.FileSpecificLabelSchemas;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.driver.Result;
import org.apache.tinkerpop.gremlin.driver.ResultSet;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.io.IOException;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class QueryTaskTest {
private final GraphTraversalSource gModern = TinkerFactory.createModern().traversal();
@Test
public void gDotEShouldOnlyCreateEdges() throws Exception {
QueryTask qt = createQueryTask(gModern.E().elementMap(), true);
Map<GraphElementType, FileSpecificLabelSchemas> results = qt.call();
FileSpecificLabelSchemas nodeSchemas = results.get(GraphElementType.nodes);
FileSpecificLabelSchemas edgeSchemas = results.get(GraphElementType.edges);
assertEquals(0, nodeSchemas.labels().size());
assertEquals(2, edgeSchemas.labels().size());
assertTrue(edgeSchemas.hasSchemasForLabel(new Label("knows")));
assertTrue(edgeSchemas.hasSchemasForLabel(new Label("created")));
}
@Test
public void gDotVShouldOnlyCreateNodes() throws Exception {
QueryTask qt = createQueryTask(gModern.V().elementMap(), true);
Map<GraphElementType, FileSpecificLabelSchemas> results = qt.call();
FileSpecificLabelSchemas nodeSchemas = results.get(GraphElementType.nodes);
FileSpecificLabelSchemas edgeSchemas = results.get(GraphElementType.edges);
assertEquals(2, nodeSchemas.labels().size());
assertEquals(0, edgeSchemas.labels().size());
assertTrue(nodeSchemas.hasSchemasForLabel(new Label("person")));
assertTrue(nodeSchemas.hasSchemasForLabel(new Label("software")));
}
@Test
public void shouldSeparateEdgesAndNodes() throws Exception {
QueryTask qt = createQueryTask(gModern.V().union(__.hasLabel("person"), __.outE().hasLabel("created")).elementMap(), true);
Map<GraphElementType, FileSpecificLabelSchemas> results = qt.call();
FileSpecificLabelSchemas nodeSchemas = results.get(GraphElementType.nodes);
FileSpecificLabelSchemas edgeSchemas = results.get(GraphElementType.edges);
assertEquals(1, nodeSchemas.labels().size());
assertEquals(1, edgeSchemas.labels().size());
assertTrue(nodeSchemas.hasSchemasForLabel(new Label("person")));
assertTrue(edgeSchemas.hasSchemasForLabel(new Label("created")));
}
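// Builds a mock QueryClient whose submit() streams Results drawn from the supplied in-memory traversal.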
private NeptuneGremlinClient.QueryClient getMockClient(GraphTraversal traversal) {
NeptuneGremlinClient.QueryClient mockClient = mock(NeptuneGremlinClient.QueryClient.class);
ResultSet results = mock(ResultSet.class);
when(results.stream()).thenReturn(traversal.toStream().map(r -> new Result(r)));
when(mockClient.submit(any(), any())).thenReturn(results);
return mockClient;
}
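// Assembles a QueryTask over a single mocked NamedQuery, with mocked printers for both nodes and edges.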
private QueryTask createQueryTask(GraphTraversal traversal, boolean structuredOutput) throws IOException {
Queue<NamedQuery> mockQueries = new LinkedList<>();
mockQueries.add(mock(NamedQuery.class));
PropertyGraphTargetConfig targetConfig = mock(PropertyGraphTargetConfig.class);
when(targetConfig.createPrinterForEdges(any(), any())).thenReturn(mock(PropertyGraphPrinter.class));
when(targetConfig.createPrinterForNodes(any(), any())).thenReturn(mock(PropertyGraphPrinter.class));
return new QueryTask(mockQueries,
getMockClient(traversal),
targetConfig,
false,
10000L,
new Status(StatusOutputFormat.Description, "query results test"),
new AtomicInteger(),
structuredOutput,
new AllLabels(NodeLabelStrategy.nodeLabelsOnly),
new AllLabels(EdgeLabelStrategy.edgeLabelsOnly)
);
}
}
| 862 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/EdgeWriterTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.AllLabels;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.EdgesClient;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.GremlinFilters;
import com.amazonaws.services.neptune.propertygraph.LabelStrategy;
import com.amazonaws.services.neptune.propertygraph.LabelsFilter;
import com.amazonaws.services.neptune.propertygraph.Range;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.io.result.QueriesEdgeResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class EdgeWriterTest {
private EdgesClient client;
private GraphTraversalSource gModern;
@Before
public void setup() {
gModern = TinkerFactory.createModern().traversal();
ExportStats mockStats = mock(ExportStats.class);
FeatureToggles mockFeatures = mock(FeatureToggles.class);
when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);
client = new EdgesClient(gModern, false, mockStats, mockFeatures);
}
@Test
public void shouldHandlePGEdgeResultWithEdgeLabelsOnly() throws IOException {
PGEdgeResult edgeResult = getPGEdgeResult("7", new AllLabels(EdgeLabelStrategy.edgeLabelsOnly));
PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeLabelsOnly.getLabelFor(edgeResult));
edgeWriter.handle(edgeResult, true);
String expected = "Start Row\n" +
"Edge[7, knows, 1, 2] Properties{weight:0.5, } \n";
assertEquals(expected, pgPrinter.getOutput());
}
@Test
public void shouldHandlePGEdgeResultWithEdgeAndVertexLabels() throws IOException {
PGEdgeResult edgeResult = getPGEdgeResult("7", new AllLabels(EdgeLabelStrategy.edgeAndVertexLabels));
PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeAndVertexLabels.getLabelFor(edgeResult));
edgeWriter.handle(edgeResult, true);
String expected = "Start Row\n" +
"Edge[7, knows, 1, 2, fromLabels{person, }, toLabels{person, }] Properties{weight:0.5, } \n";
assertEquals(expected, pgPrinter.getOutput());
}
@Test
public void shouldHandleQueriesEdgeResultWithEdgeLabelsOnly() throws IOException {
QueriesEdgeResult edgeResult = getQueriesEdgeResult("7");
PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeLabelsOnly.getLabelFor(edgeResult));
edgeWriter.handle(edgeResult, true);
String expected = "Start Row\n" +
"Edge[7, knows, 1, 2] Properties{weight:0.5, } \n";
assertEquals(expected, pgPrinter.getOutput());
}
@Test
public void shouldHandleQueriesEdgeResultWithEdgeAndVertexLabels() throws IOException {
QueriesEdgeResult edgeResult = getQueriesEdgeResult("7");
PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
EdgeWriter edgeWriter = new EdgeWriter(pgPrinter, EdgeLabelStrategy.edgeAndVertexLabels.getLabelFor(edgeResult));
edgeWriter.handle(edgeResult, true);
String expected = "Start Row\n" +
"Edge[7, knows, 1, 2, fromLabels{person, }, toLabels{person, }] Properties{weight:0.5, } \n";
assertEquals(expected, pgPrinter.getOutput());
}
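// Runs an export query over the modern graph and captures the PGEdgeResult with the given id.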
private PGEdgeResult getPGEdgeResult(String id, LabelsFilter labelsFilter) {
final PGEdgeResult[] result = {null};
GraphElementHandler<PGResult> handler = new GraphElementHandler<PGResult>() {
@Override
public void handle(PGResult element, boolean allowTokens) throws IOException {
if(element.getId().equals(id)) {
result[0] = (PGEdgeResult) element;
}
}
@Override
public void close() throws Exception {}
};
client.queryForValues(handler, Range.ALL, labelsFilter,
GremlinFilters.EMPTY, new GraphElementSchemas());
return result[0];
}
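// Wraps the raw elementMap() of the edge with the given id in a QueriesEdgeResult.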
private QueriesEdgeResult getQueriesEdgeResult(String id) {
return new QueriesEdgeResult(gModern.E(id).elementMap().next());
}
}
| 863 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/VariableRowCsvPropertyGraphPrinterTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.io.PrintOutputWriter;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.schema.DataType;
import com.amazonaws.services.neptune.propertygraph.schema.LabelSchema;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;
import static com.amazonaws.services.neptune.util.MapUtils.entry;
import static com.amazonaws.services.neptune.util.MapUtils.map;
import static org.junit.Assert.*;
public class VariableRowCsvPropertyGraphPrinterTest {
private final PrinterOptions printerOptions = PrinterOptions.NULL_OPTIONS;
@Test
public void appendsPreviouslyUnseenColumnsToEndOfRow() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
new PrintOutputWriter("test", stringWriter),
labelSchema,
printerOptions);
print(printer,
map(entry("fname", "fname1")),
map(entry("fname", "fname2"), entry("lname", "lname2")),
map(entry("fname", "fname3"), entry("age", 30)),
map(entry("lname", "lname4"), entry("age", 40)),
map(entry("fname", "fname5"), entry("lname", "lname5"), entry("age", 50))
);
String expectedOutput = "\"fname1\"\n" +
"\"fname2\",\"lname2\"\n" +
"\"fname3\",,30\n" +
",\"lname4\",40\n" +
"\"fname5\",\"lname5\",50\n";
assertEquals(expectedOutput, stringWriter.toString());
}
@Test
public void updatesDataTypesForColumnsWithEachNewRow() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
new PrintOutputWriter("test", stringWriter),
labelSchema,
printerOptions);
print(printer,
map(entry("age", 10)),
map(entry("age", "ten"), entry("height", 5)),
map(entry("age", 11), entry("height", 5.2))
);
assertEquals(2, labelSchema.propertyCount());
assertEquals(DataType.String, labelSchema.getPropertySchema("age").dataType());
assertEquals(DataType.Double, labelSchema.getPropertySchema("height").dataType());
}
@Test
public void columnsThatDoNotAppearInFirstRowAreNullable() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
new PrintOutputWriter("test", stringWriter),
labelSchema,
printerOptions);
print(printer,
map(entry("p-1", 10), entry("p-2", 20)),
map(entry("p-1", 30), entry("p-2", 40), entry("p-3", 50)),
map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
);
assertFalse(labelSchema.getPropertySchema("p-1").isNullable());
assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
}
@Test
public void columnsThatAppearInFirstRowButNotSubsequentRowsAreNullable() throws IOException {
StringWriter stringWriter = new StringWriter();
LabelSchema labelSchema = new LabelSchema(new Label("my-label"));
VariableRowCsvPropertyGraphPrinter printer = new VariableRowCsvPropertyGraphPrinter(
new PrintOutputWriter("test", stringWriter),
labelSchema,
printerOptions);
print(printer,
map(entry("p-1", 10), entry("p-2", 20)),
map(entry("p-2", 40), entry("p-3", 50)),
map(entry("p-1", 60), entry("p-2", 70), entry("p-4", 80))
);
assertTrue(labelSchema.getPropertySchema("p-1").isNullable());
assertFalse(labelSchema.getPropertySchema("p-2").isNullable());
assertTrue(labelSchema.getPropertySchema("p-3").isNullable());
assertTrue(labelSchema.getPropertySchema("p-4").isNullable());
}
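// Helper: for each row, prints a start row, the row's properties, and an end row with the supplied printer.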
private void print(PropertyGraphPrinter printer, Map<?, ?>... rows) throws IOException {
for (Map<?, ?> row : rows) {
printer.printStartRow();
printer.printProperties(row);
printer.printEndRow();
}
}
}
| 864 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/NodeWriterTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.AllLabels;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.EdgesClient;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.GremlinFilters;
import com.amazonaws.services.neptune.propertygraph.NodeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.NodesClient;
import com.amazonaws.services.neptune.propertygraph.Range;
import com.amazonaws.services.neptune.propertygraph.io.result.PGEdgeResult;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementSchemas;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NodeWriterTest {
private NodesClient client;
@Before
public void setup() {
GraphTraversalSource graphTraversalSource = TinkerFactory.createModern().traversal();
ExportStats mockStats = mock(ExportStats.class);
FeatureToggles mockFeatures = mock(FeatureToggles.class);
when(mockFeatures.containsFeature(Mockito.any())).thenReturn(false);
client = new NodesClient(graphTraversalSource, false, mockStats, mockFeatures);
}
@Test
public void testHandle() throws IOException {
PGResult nodeResult = getPGNodeResult("1");
PropertyGraphStringPrinter pgPrinter = new PropertyGraphStringPrinter();
NodeWriter nodeWriter = new NodeWriter(pgPrinter);
nodeWriter.handle(nodeResult, true);
String expected = "Start Row\n" +
"Node[1, Labels{person, }] Properties{name:[marko], age:[29], } \n";
assertEquals(expected, pgPrinter.getOutput());
}
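// Runs an export query over the modern graph and captures the PGResult for the node with the given id.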
private PGResult getPGNodeResult(String id) {
final PGResult[] result = {null};
GraphElementHandler<PGResult> handler = new GraphElementHandler<PGResult>() {
@Override
public void handle(PGResult element, boolean allowTokens) throws IOException {
if(element.getId().equals(id)) {
result[0] = element;
}
}
@Override
public void close() throws Exception {}
};
client.queryForValues(handler, Range.ALL, new AllLabels(NodeLabelStrategy.nodeLabelsOnly),
GremlinFilters.EMPTY, new GraphElementSchemas());
return result[0];
}
}
| 865 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/PropertyGraphStringPrinter.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;
import com.amazonaws.services.neptune.util.NotImplementedException;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
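// Test double that records every printer call in a string so tests can assert on the rendered output.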
class PropertyGraphStringPrinter implements PropertyGraphPrinter {
StringBuilder output = new StringBuilder();
public String getOutput() {
return output.toString();
}
@Override
public String outputId() {
return null;
}
@Override
public void printHeaderMandatoryColumns(String... columns) {
throw new NotImplementedException();
}
@Override
public void printHeaderRemainingColumns(Collection<PropertySchema> remainingColumns) {
throw new NotImplementedException();
}
@Override
public void printProperties(Map<?, ?> properties) throws IOException {
output.append("Properties{");
properties.forEach((key, value) -> {
output.append(key.toString() + ":" + value.toString() + ", ");
});
output.append("} ");
}
@Override
public void printProperties(Map<?, ?> properties, boolean applyFormatting) throws IOException {
printProperties(properties);
}
@Override
public void printProperties(String id, String streamOperation, Map<?, ?> properties) throws IOException {
printProperties(properties);
}
@Override
public void printEdge(String id, String label, String from, String to) throws IOException {
output.append(String.format("Edge[%s, %s, %s, %s] ", id, label, from, to));
}
@Override
public void printEdge(String id, String label, String from, String to, Collection<String> fromLabels, Collection<String> toLabels) throws IOException {
StringBuilder builder = new StringBuilder();
builder.append(String.format("Edge[%s, %s, %s, %s, fromLabels{", id, label, from, to));
for (String fromLabel : fromLabels) {
builder.append(fromLabel).append(", ");
}
builder.append("}, toLabels{");
for (String toLabel : toLabels) {
builder.append(toLabel).append(", ");
}
builder.append("}] ");
output.append(builder.toString());
}
@Override
public void printNode(String id, List<String> labels) throws IOException {
StringBuilder builder = new StringBuilder();
builder.append(String.format("Node[%s, Labels{", id));
for (String label : labels) {
builder.append(label).append(", ");
}
builder.append("}] ");
output.append(builder.toString());
}
@Override
public void printStartRow() throws IOException {
output.append("Start Row\n");
}
@Override
public void printEndRow() throws IOException {
output.append("\n");
}
@Override
public void close() throws Exception {
}
}
| 866 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/QueriesEdgeResultTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.propertygraph.io.result.QueriesEdgeResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
public class QueriesEdgeResultTest {
private final GraphTraversalSource gModern = TinkerFactory.createModern().traversal();
/**
* Wraps TinkerPop's modern graph g.E(9) in a QueriesEdgeResult and asserts the expected results
*/
@Test
public void testStandardEdgeElementMap() {
QueriesEdgeResult modernE9 = new QueriesEdgeResult(gModern.E("9").elementMap().next());
assertEquals(GraphElementType.edges, modernE9.getGraphElementType());
assertEquals(Collections.singletonList("created"), modernE9.getLabel());
assertEquals("9", modernE9.getId());
Map<String, Object> properties = new HashMap<>();
properties.put("weight", 0.4);
assertEquals(properties, modernE9.getProperties());
assertEquals("1", modernE9.getFrom());
assertEquals("3", modernE9.getTo());
assertEquals(Collections.singletonList("person"), modernE9.getFromLabels());
assertEquals(Collections.singletonList("software"), modernE9.getToLabels());
}
@Test
public void testEdgeWithNoProperties() {
Map e9 = gModern.E("9").elementMap().next();
e9.remove("weight");
QueriesEdgeResult queriesEdgeResult = new QueriesEdgeResult(e9);
assertEquals(new HashMap<String, Object>(), queriesEdgeResult.getProperties());
}
}
| 867 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/io/QueriesNodeResultTest.java
|
package com.amazonaws.services.neptune.propertygraph.io;
import com.amazonaws.services.neptune.propertygraph.io.result.QueriesNodeResult;
import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
public class QueriesNodeResultTest {
private final GraphTraversalSource gModern = TinkerFactory.createModern().traversal();
/**
* Wraps TinkerPop's modern graph g.V(1) in a QueriesNodeResult and asserts the expected results
*/
@Test
public void testStandardNodeElementMap() {
QueriesNodeResult modernV1 = new QueriesNodeResult(gModern.V("1").elementMap().next());
assertEquals(GraphElementType.nodes, modernV1.getGraphElementType());
assertEquals(Collections.singletonList("person"), modernV1.getLabel());
assertEquals("1", modernV1.getId());
Map<String, Object> properties = new HashMap<>();
properties.put("name", "marko");
properties.put("age", 29);
assertEquals(properties, modernV1.getProperties());
assertThrows(IllegalStateException.class, () -> {modernV1.getFrom();});
assertThrows(IllegalStateException.class, () -> {modernV1.getTo();});
assertThrows(IllegalStateException.class, () -> {modernV1.getFromLabels();});
assertThrows(IllegalStateException.class, () -> {modernV1.getToLabels();});
}
@Test
public void testNodeWithNoProperties() {
Map v1 = gModern.V("1").elementMap().next();
v1.remove("name");
v1.remove("age");
QueriesNodeResult queriesNodeResult = new QueriesNodeResult(v1);
assertEquals(new HashMap<String, Object>(), queriesNodeResult.getProperties());
}
}
| 868 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/DataTypeTest.java
|
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Date;
import static org.junit.Assert.assertEquals;
public class DataTypeTest {
@Test
public void emptyStringDateValueShouldReturnEmptyString() {
String result = DataType.Date.format("");
assertEquals("", result);
}
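// RFC 4180 escaping: a field containing double quotes is wrapped in quotes and each embedded quote is doubled.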
@Test
public void shouldEscapeDoubleQuotes() {
String result = DataType.String.format("One \"two\" three");
assertEquals("\"One \"\"two\"\" three\"", result);
}
@Test
public void shouldEscapeTwoDoubleQuotes() {
String result = DataType.String.format("One \"\"two\"\" three");
assertEquals("\"One \"\"\"\"two\"\"\"\" three\"", result);
}
@Test
public void shouldEscapeThreeDoubleQuotes() {
String result = DataType.String.format("One \"\"\"two\"\"\" three");
assertEquals("\"One \"\"\"\"\"\"two\"\"\"\"\"\" three\"", result);
}
@Test
public void shouldRoundTripDate() {
Date now = new Date();
DataType dataType = DataType.dataTypeFor(now.getClass());
String nowString = dataType.format(now);
Object converted = dataType.convert(nowString);
assertEquals(now, converted);
}
@Test
public void shouldRoundTripDateWhenCallingFormatWithEscapeNewlineParam() {
Date now = new Date();
DataType dataType = DataType.dataTypeFor(now.getClass());
String nowString = dataType.format(now, false);
Object converted = dataType.convert(nowString);
assertEquals(now, converted);
}
@Test
public void shouldNotEscapeNewlineChar(){
String result = DataType.String.format("A\nB");
assertEquals("\"A\nB\"", result);
}
@Test
public void shouldNotEscapeNewline(){
String result = DataType.String.format("A" + System.lineSeparator() + "B");
assertEquals("\"A\nB\"", result);
}
@Test
public void shouldEscapeNewlineCharIfEscapeNewlineSetToTrue(){
String result = DataType.String.format("A\nB", true);
assertEquals("\"A\\nB\"", result);
}
@Test
public void shouldEscapeNewlineIfEscapeNewlineSetToTrue(){
String result = DataType.String.format("A" + System.lineSeparator() + "B", true);
assertEquals("\"A\\nB\"", result);
}
@Test
public void doubleShouldWriteIntAsDouble() throws IOException {
String result1 = createJsonArray(generator -> DataType.Double.printTo(generator, 0));
assertEquals("[0.0]", result1);
String result2 = createJsonObject(generator -> DataType.Double.printTo(generator, "value", 0));
assertEquals("{\"value\":0.0}", result2);
}
@Test
public void longShouldWriteIntAsLong() throws IOException {
String result1 = createJsonArray(generator -> DataType.Long.printTo(generator, 0));
assertEquals("[0]", result1);
String result2 = createJsonObject(generator -> DataType.Long.printTo(generator, "value", 0));
assertEquals("{\"value\":0}", result2);
}
@Test
public void floatShouldWriteIntAsFloat() throws IOException {
String result1 = createJsonArray(generator -> DataType.Float.printTo(generator, 0));
assertEquals("[0.0]", result1);
String result2 = createJsonObject(generator -> DataType.Float.printTo(generator, "value", 0));
assertEquals("{\"value\":0.0}", result2);
}
@Test
public void shortShouldWriteIntAsShort() throws IOException {
String result1 = createJsonArray(generator -> DataType.Short.printTo(generator, 0));
assertEquals("[0]", result1);
String result2 = createJsonObject(generator -> DataType.Short.printTo(generator, "value", 0));
assertEquals("{\"value\":0}", result2);
}
@Test
public void byteShouldWriteIntAsByte() throws IOException {
String result1 = createJsonArray(generator -> DataType.Byte.printTo(generator, 0));
assertEquals("[0]", result1);
String result2 = createJsonObject(generator -> DataType.Byte.printTo(generator, "value", 0));
assertEquals("{\"value\":0}", result2);
}
@Test
public void boolShouldWriteIntAsBool() throws IOException {
String result1 = createJsonArray(generator -> DataType.Boolean.printTo(generator, 0));
assertEquals("[false]", result1);
String result2 = createJsonObject(generator -> DataType.Boolean.printTo(generator, "value", 0));
assertEquals("{\"value\":false}", result2);
}
private String createJsonArray(UseDataType useDataType) throws IOException {
StringWriter writer = new StringWriter();
JsonGenerator jsonGenerator = new JsonFactory().createGenerator(writer);
jsonGenerator.writeStartArray();
useDataType.apply(jsonGenerator);
jsonGenerator.writeEndArray();
jsonGenerator.flush();
return writer.toString();
}
private String createJsonObject(UseDataType useDataType) throws IOException {
StringWriter writer = new StringWriter();
JsonGenerator jsonGenerator = new JsonFactory().createGenerator(writer);
jsonGenerator.writeStartObject();
useDataType.apply(jsonGenerator);
jsonGenerator.writeEndObject();
jsonGenerator.flush();
return writer.toString();
}
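// Functional interface so each test can pass the DataType call under test as a lambda.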
private interface UseDataType{
void apply(JsonGenerator generator) throws IOException;
}
}
| 869 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/LabelSchemaTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.junit.Test;
import java.util.EnumSet;
import static org.junit.Assert.*;
public class LabelSchemaTest {
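// union() merges two schemas for the same label: data types widen to the more general type, and multi-value flags are preserved.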
@Test
public void unioningShouldUpdateDataTypesOfExistingProperties(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p3", new PropertySchema("p3", false, DataType.Double, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Double, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p2", new PropertySchema("p2", false, DataType.Integer, true, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p3", new PropertySchema("p3", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema result = labelSchema1.union(labelSchema2);
assertEquals(result.getPropertySchema("p1"),
new PropertySchema("p1", false, DataType.Double, false, EnumSet.noneOf(DataType.class)));
assertEquals(result.getPropertySchema("p2"),
new PropertySchema("p2", false, DataType.Integer, true, EnumSet.noneOf(DataType.class)));
assertEquals(result.getPropertySchema("p3"),
new PropertySchema("p3", false, DataType.Double, false, EnumSet.noneOf(DataType.class)));
}
@Test
public void unioningShouldAddNewProperties(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p3", new PropertySchema("p3", false, DataType.Double, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p4", new PropertySchema("p4", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p5", new PropertySchema("p5", false, DataType.Integer, true, EnumSet.noneOf(DataType.class)));
LabelSchema result = labelSchema1.union(labelSchema2);
assertEquals(5, result.propertySchemas().size());
assertEquals(result.getPropertySchema("p4"),
new PropertySchema("p4", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
assertEquals(result.getPropertySchema("p5"),
new PropertySchema("p5", false, DataType.Integer, true, EnumSet.noneOf(DataType.class)));
}
@Test
public void schemasWithSameLabelAndPropertySchemasAreSame(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
assertTrue(labelSchema1.isSameAs(labelSchema2));
}
@Test
public void schemasWithDifferentLabelsAreNotSame(){
LabelSchema labelSchema1 = new LabelSchema(new Label("this-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("that-label"));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
assertFalse(labelSchema1.isSameAs(labelSchema2));
}
@Test
public void schemasWithDifferentPropertiesAreNotSame(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Double, true, EnumSet.noneOf(DataType.class)));
assertFalse(labelSchema1.isSameAs(labelSchema2));
}
@Test
public void schemasWithDifferentNumberOfPropertiesAreNotSame(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p3", new PropertySchema("p3", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema3 = new LabelSchema(new Label("my-label"));
labelSchema3.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
assertFalse(labelSchema1.isSameAs(labelSchema2));
assertFalse(labelSchema1.isSameAs(labelSchema3));
}
@Test
public void schemasWithPropertySchemasInDifferentOrderAreNotSame(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
assertFalse(labelSchema1.isSameAs(labelSchema2));
}
@Test
public void schemasWithPropertiesWithDifferentNullableCharacteristicsAreNotSame(){
LabelSchema labelSchema1 = new LabelSchema(new Label("my-label"));
labelSchema1.put("p1", new PropertySchema("p1", true, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema1.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
LabelSchema labelSchema2 = new LabelSchema(new Label("my-label"));
labelSchema2.put("p1", new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
labelSchema2.put("p2", new PropertySchema("p2", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
assertFalse(labelSchema1.isSameAs(labelSchema2));
}
}
| 870 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/GraphElementSchemasTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.junit.Test;
import static com.amazonaws.services.neptune.util.MapUtils.entry;
import static com.amazonaws.services.neptune.util.MapUtils.map;
import static org.junit.Assert.*;
public class GraphElementSchemasTest {
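// createCopy() should produce a deep copy: identical serialized form, but a distinct instance.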
@Test
public void canCreateCopyOfSelf(){
GraphElementSchemas original = new GraphElementSchemas();
original.update(new Label("label1"), map(entry("fname", "fname-1")), false);
original.update(new Label("label1"), map(entry("lname", "lname-1")), false);
original.update(new Label("label2"), map(entry("fname", "fname-2"), entry("lname", "lname-2")), false);
GraphElementSchemas copy = original.createCopy();
assertEquals(original.toJson(), copy.toJson());
assertNotEquals(original, copy);
}
}
| 871 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/propertygraph/schema/PropertySchemaTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.propertygraph.schema;
import org.junit.Test;
import java.util.EnumSet;
import static org.junit.Assert.*;
public class PropertySchemaTest {
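// union() ORs the multi-value and nullable flags of the two schemas.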
@Test
public void revisionWhereAtLeastOneSchemaIsMultiValueShouldResultInMultiValue(){
PropertySchema schema1 = new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class));
PropertySchema schema2 = new PropertySchema("p1", false, DataType.Integer, true, EnumSet.noneOf(DataType.class));
assertTrue(schema1.union(schema2).isMultiValue());
assertTrue(schema2.union(schema1).isMultiValue());
}
@Test
public void revisionWhereAtLeastOneSchemaIsNullableShouldResultInNullable(){
PropertySchema schema1 = new PropertySchema("p1", false, DataType.Integer, false, EnumSet.noneOf(DataType.class));
PropertySchema schema2 = new PropertySchema("p1", true, DataType.Integer, false, EnumSet.noneOf(DataType.class));
assertTrue(schema1.union(schema2).isNullable());
assertTrue(schema2.union(schema1).isNullable());
}
@Test
public void shouldEscapePropertyNameContainingColons(){
PropertySchema schema = new PropertySchema("p1:a:b:c", false, DataType.Integer, false, EnumSet.noneOf(DataType.class));
assertEquals("p1\\:a\\:b\\:c:int", schema.nameWithDataType(true));
assertEquals("p1\\:a\\:b\\:c", schema.nameWithoutDataType(true));
assertEquals("p1:a:b:c:int", schema.nameWithDataType());
assertEquals("p1:a:b:c", schema.nameWithoutDataType());
}
}
| 872 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/incremental_export/StreamRecordsNotFoundExceptionParserTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.incremental_export;
import com.amazonaws.services.neptune.cluster.EventId;
import com.amazonaws.services.neptune.cluster.StreamRecordsNotFoundExceptionParser;
import org.junit.Test;
import static org.junit.Assert.*;
public class StreamRecordsNotFoundExceptionParserTest {
@Test
public void shouldParseCommitNumAndOpNum(){
String errorMessage = "Requested startEventId is from the future. Last valid eventId is [commitNum = 1132, opNum = 200]";
EventId lastEventId = StreamRecordsNotFoundExceptionParser.parseLastEventId(errorMessage);
assertEquals(1132, lastEventId.commitNum());
assertEquals(200, lastEventId.opNum());
}
@Test
public void shouldReturnMinus1IfNotFound(){
String errorMessage = "Requested startEventId is from the future";
EventId lastEventId = StreamRecordsNotFoundExceptionParser.parseLastEventId(errorMessage);
assertEquals(-1, lastEventId.commitNum());
assertEquals(-1, lastEventId.opNum());
}
}
| 873 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/JsonFromResource.java
|
package com.amazonaws.services.neptune.profiles.neptune_ml;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.util.Objects;
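// Loads a JSON file from the test resources, resolved by the test class's simple name and the given filename.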
public class JsonFromResource {
public static JsonNode get(String filename, Class<?> testClass) throws IOException {
String path = String.format("%s/%s", testClass.getSimpleName(), filename);
ClassLoader classLoader = testClass.getClassLoader();
File file = new File(Objects.requireNonNull(classLoader.getResource(path)).getFile());
ObjectMapper objectMapper = new ObjectMapper();
return objectMapper.readTree(file);
}
}
| 874 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/Output.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
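// Test helper that captures JSON written through a JsonGenerator and exposes it as parsed JsonNode trees.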
public class Output {
public static String format(JsonNode json) throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(json);
}
private final StringWriter writer = new StringWriter();
private final JsonGenerator generator;
public Output() throws IOException {
this.generator = createJsonGenerator(writer);
}
public JsonGenerator generator(){
return generator;
}
public JsonNode allOutput() throws JsonProcessingException {
return new ObjectMapper().readTree(writer.toString());
}
public JsonNode graph() throws JsonProcessingException {
return new ObjectMapper().readTree(writer.toString()).path("graph");
}
public ArrayNode warnings() throws JsonProcessingException {
return (ArrayNode) new ObjectMapper().readTree(writer.toString()).path("warnings");
}
private JsonGenerator createJsonGenerator(Writer writer) throws IOException {
JsonGenerator generator = new JsonFactory().createGenerator(writer);
generator.setPrettyPrinter(new DefaultPrettyPrinter());
return generator;
}
}
| 875 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1FeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
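/**
 * Tests for the "features" entries that PropertyGraphTrainingDataConfigWriterV1
 * emits for node and edge properties. The JSON shapes sketched in comments
 * below are inferred from the assertions and are illustrative only; exact
 * field ordering may differ.
 */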
public class PropertyGraphTrainingDataConfigWriterV1FeatureTest {
@Test
public void shouldWriteNewObjectForEach() throws IOException {
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
nodeSchemas.addLabelSchema(new LabelSchema(new Label("Person")), Arrays.asList("person-1.csv", "person-2.csv"));
edgeSchemas.addLabelSchema(new LabelSchema(new Label("follows")), Arrays.asList("follows-1.csv", "follows-2.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(4, graph.size());
ArrayNode array = (ArrayNode) graph;
Assert.assertEquals("nodes/person-1.csv", array.get(0).path("file_name").textValue());
Assert.assertEquals("nodes/person-2.csv", array.get(1).path("file_name").textValue());
Assert.assertEquals("edges/follows-1.csv", array.get(2).path("file_name").textValue());
Assert.assertEquals("edges/follows-2.csv", array.get(3).path("file_name").textValue());
}
@Test
public void everyObjectShouldHaveACommaSeparator() throws IOException {
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
nodeSchemas.addLabelSchema(new LabelSchema(new Label("Person")), Collections.singletonList("person-1.csv"));
edgeSchemas.addLabelSchema(new LabelSchema(new Label("follows")), Collections.singletonList("follows-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(2, graph.size());
ArrayNode array = (ArrayNode) graph;
Assert.assertEquals(",", array.get(0).path("separator").textValue());
Assert.assertEquals(",", array.get(1).path("separator").textValue());
}
@Test
public void edgesShouldIncludeEdgeSpec() throws IOException {
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
edgeSchemas.addLabelSchema(new LabelSchema(
new Label(
"follows",
Arrays.asList("Person", "Admin"),
Arrays.asList("Person", "Temp"))),
Collections.singletonList("follows-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode edges = (ArrayNode) array.get(0).path("edges");
Assert.assertEquals(1, edges.size());
JsonNode edge = edges.get(0);
Assert.assertEquals("edge", edge.path("edge_spec_type").textValue());
ArrayNode cols = (ArrayNode) edge.path("cols");
Assert.assertEquals("~from", cols.get(0).textValue());
Assert.assertEquals("~to", cols.get(1).textValue());
ArrayNode edgeType = (ArrayNode) edge.path("edge_type");
Assert.assertEquals("Admin;Person", edgeType.get(0).textValue());
Assert.assertEquals("follows", edgeType.get(1).textValue());
Assert.assertEquals("Person;Temp", edgeType.get(2).textValue());
}
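// For reference, the edge entry asserted above looks roughly like this
// (shape inferred from the assertions; illustrative only):
// { "edges": [ { "edge_spec_type": "edge",
//                "cols": ["~from", "~to"],
//                "edge_type": ["Admin;Person", "follows", "Person;Temp"] } ] }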
@Test
public void singleValueFloatFeatureForVertex() throws IOException {
DataType dataType = DataType.Float;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(new Label(Arrays.asList("Person", "Admin")));
labelSchema.put("rating", new PropertySchema("rating", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("numerical", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Admin;Person", feature.path("node_type").textValue());
Assert.assertEquals("min-max", feature.path("norm").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("rating", cols.get(1).textValue());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
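// Sketch of the numerical feature asserted above (inferred from the
// assertions; illustrative only):
// { "feat_type": "node", "sub_feat_type": "numerical", "norm": "min-max",
//   "node_type": "Admin;Person", "cols": ["~id", "rating"] }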
@Test
public void shouldNotIncludeFeatureForMultiValueFloatFeatureForVertex() throws IOException {
DataType dataType = DataType.Float;
boolean isNullable = false;
boolean isMultiValue = true;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(new Label(Collections.singletonList("Movie")));
labelSchema.put("encoding", new PropertySchema("encoding", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(0, features.size());
}
@Test
public void intFeatureForVertex() throws IOException {
DataType dataType = DataType.Integer;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(new Label(Arrays.asList("Person", "Admin")));
labelSchema.put("age", new PropertySchema("age", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("numerical", feature.path("sub_feat_type").textValue());
Assert.assertEquals("min-max", feature.path("norm").textValue());
Assert.assertEquals("Admin;Person", feature.path("node_type").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("age", cols.get(1).textValue());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
@Test
public void singleValueStringFeatureForVertex() throws IOException {
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(new Label(Collections.singletonList("Movie")));
labelSchema.put("class", new PropertySchema("class", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("category", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Movie", feature.path("node_type").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("class", cols.get(1).textValue());
Assert.assertTrue(feature.path("norm").isMissingNode());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
@Test
public void multiValueStringFeatureForVertex() throws IOException {
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = true;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(new Label(Collections.singletonList("Movie")));
labelSchema.put("movieType", new PropertySchema("movieType", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("category", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Movie", feature.path("node_type").textValue());
Assert.assertEquals(";", feature.path("separator").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("movieType", cols.get(1).textValue());
Assert.assertTrue(feature.path("norm").isMissingNode());
}
@Test
public void shouldAddWord2VecFeatureIfSpecifiedInConfig() throws IOException {
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
Label movieLabel = new Label(Collections.singletonList("Movie"));
LabelSchema labelSchema = new LabelSchema(movieLabel);
labelSchema.put("genre", new PropertySchema("genre", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withWord2VecNodeFeature(
movieLabel,
"genre",
"en_core_web_lg", "fr_core_news_lg")
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("word2vec", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Movie", feature.path("node_type").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("genre", cols.get(1).textValue());
ArrayNode language = (ArrayNode) feature.path("language");
Assert.assertEquals(2, language.size());
Assert.assertEquals("en_core_web_lg", language.get(0).textValue());
Assert.assertEquals("fr_core_news_lg", language.get(1).textValue());
Assert.assertTrue(feature.path("norm").isMissingNode());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
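// Sketch of the word2vec feature asserted above (inferred from the
// assertions; illustrative only):
// { "feat_type": "node", "sub_feat_type": "word2vec", "node_type": "Movie",
//   "cols": ["~id", "genre"],
//   "language": ["en_core_web_lg", "fr_core_news_lg"] }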
@Test
public void shouldAddNumericalBucketFeatureIfSpecifiedInConfig() throws IOException {
DataType dataType = DataType.Integer;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
Label movieLabel = new Label(Collections.singletonList("Movie"));
LabelSchema labelSchema = new LabelSchema(movieLabel);
labelSchema.put("score", new PropertySchema("score", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNumericalBucketFeature(movieLabel, "score", new Range(1, 100), 10, 2)
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("bucket_numerical", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Movie", feature.path("node_type").textValue());
Assert.assertEquals(10, feature.path("bucket_cnt").intValue());
Assert.assertEquals(2, feature.path("slide_window_size").intValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("score", cols.get(1).textValue());
ArrayNode range = (ArrayNode) feature.path("range");
Assert.assertEquals(2, range.size());
Assert.assertEquals(1, range.get(0).intValue());
Assert.assertEquals(100, range.get(1).intValue());
Assert.assertTrue(feature.path("norm").isMissingNode());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
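// Sketch of the bucket_numerical feature asserted above (inferred from the
// assertions; illustrative only):
// { "feat_type": "node", "sub_feat_type": "bucket_numerical", "node_type": "Movie",
//   "cols": ["~id", "score"], "range": [1, 100],
//   "bucket_cnt": 10, "slide_window_size": 2 }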
@Test
public void shouldAddNumericalBucketFeatureForAllNumberTypes() throws IOException {
Collection<DataType> dataTypes = Arrays.asList(DataType.Byte, DataType.Integer, DataType.Double, DataType.Float, DataType.Long, DataType.Short);
boolean isNullable = false;
boolean isMultiValue = false;
for (DataType dataType : dataTypes) {
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
Label movieLabel = new Label(Collections.singletonList("Movie"));
LabelSchema labelSchema = new LabelSchema(movieLabel);
labelSchema.put("score", new PropertySchema("score", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNumericalBucketFeature(movieLabel, "score", new Range(1, 100), 10, 2)
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("bucket_numerical", feature.path("sub_feat_type").textValue());
}
}
@Test
public void shouldAddWarningIfAttemptingToCreateNumericalBucketFeatureForMultiValueDataType() throws IOException {
DataType dataType = DataType.Integer;
boolean isNullable = false;
boolean isMultiValue = true;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
Label movieLabel = new Label(Collections.singletonList("Movie"));
LabelSchema labelSchema = new LabelSchema(movieLabel);
labelSchema.put("score", new PropertySchema("score", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("movie-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNumericalBucketFeature(movieLabel, "score", new Range(1, 100), 10, 2)
.build())
.write();
JsonNode graph = output.graph();
ArrayNode warnings = output.warnings();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
Assert.assertTrue(array.get(0).path("labels").isMissingNode());
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(0, features.size());
Assert.assertEquals(1, warnings.size());
Assert.assertEquals("Unable to add numerical bucket feature: Property 'score' of node type 'Movie' is a multi-value property.", warnings.get(0).textValue());
}
@Test
public void singleValueNumericFeatureForEdge() throws IOException {
Collection<DataType> dataTypes = Arrays.asList(DataType.Byte, DataType.Integer, DataType.Double, DataType.Float, DataType.Long, DataType.Short);
boolean isNullable = false;
boolean isMultiValue = false;
for (DataType dataType : dataTypes) {
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
LabelSchema labelSchema = new LabelSchema(new Label("knows", Collections.singletonList("Person"), Collections.singletonList("Person")));
labelSchema.put("strength", new PropertySchema("strength", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
edgeSchemas.addLabelSchema(labelSchema, Collections.singletonList("knows-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(graphSchema, output.generator(), PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE, PrinterOptions.NULL_OPTIONS).write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("edge", feature.path("feat_type").textValue());
Assert.assertEquals("numerical", feature.path("sub_feat_type").textValue());
Assert.assertEquals("min-max", feature.path("norm").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(3, cols.size());
Assert.assertEquals("~from", cols.get(0).textValue());
Assert.assertEquals("~to", cols.get(1).textValue());
Assert.assertEquals("strength", cols.get(2).textValue());
ArrayNode edgeType = (ArrayNode) feature.path("edge_type");
Assert.assertEquals(3, edgeType.size());
Assert.assertEquals("Person", edgeType.get(0).textValue());
Assert.assertEquals("knows", edgeType.get(1).textValue());
Assert.assertEquals("Person", edgeType.get(2).textValue());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
}
}
| 876 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1LabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import java.util.EnumSet;
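/**
 * Tests for the "labels" entries (node and edge class labels) that
 * PropertyGraphTrainingDataConfigWriterV1 emits when a class label is
 * specified in the training data config.
 */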
public class PropertyGraphTrainingDataConfigWriterV1LabelTest {
@Test
public void shouldAddNodeClassLabelIfSpecifiedInConfig() throws IOException {
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = false;
Label personLabel = new Label(Collections.singletonList("Person"));
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(personLabel);
labelSchema.put("role", new PropertySchema("role", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNodeClassLabel(personLabel, "role")
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode labels = (ArrayNode) array.get(0).path("labels");
Assert.assertEquals(1, labels.size());
JsonNode label = labels.get(0);
Assert.assertEquals("node", label.path("label_type").textValue());
Assert.assertEquals("node_class_label", label.path("sub_label_type").textValue());
ArrayNode cols = (ArrayNode) label.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("role", cols.get(1).textValue());
ArrayNode splitRates = (ArrayNode) label.path("split_rate");
Assert.assertEquals(3, splitRates.size());
Assert.assertEquals(0.7, splitRates.get(0).doubleValue(), 0.0);
Assert.assertEquals(0.1, splitRates.get(1).doubleValue(), 0.0);
Assert.assertEquals(0.2, splitRates.get(2).doubleValue(), 0.0);
Assert.assertEquals("Person", label.path("node_type").textValue());
Assert.assertTrue(label.path("separator").isMissingNode());
}
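// Sketch of the node class label asserted above (inferred from the
// assertions; illustrative only):
// { "label_type": "node", "sub_label_type": "node_class_label",
//   "cols": ["~id", "role"], "split_rate": [0.7, 0.1, 0.2],
//   "node_type": "Person" }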
@Test
public void shouldAddWarningIfColumnDoesNotExistForNodeClassLabel() throws IOException {
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = false;
Label personLabel = new Label(Collections.singletonList("Person"));
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(personLabel);
labelSchema.put("role", new PropertySchema("role", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNodeClassLabel(personLabel, "does-not-exist")
.build())
.write();
JsonNode graph = output.graph();
ArrayNode warnings = output.warnings();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
Assert.assertTrue(array.get(0).path("labels").isMissingNode());
Assert.assertEquals(1, warnings.size());
Assert.assertEquals("Unable to add node class label: Node of type 'Person' does not contain property 'does-not-exist'.", warnings.get(0).textValue());
}
@Test
public void shouldAddSeparatorIfNodeClassLabelIsMultiValued() throws IOException {
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = true;
Label personLabel = new Label(Collections.singletonList("Person"));
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
LabelSchema labelSchema = new LabelSchema(personLabel);
labelSchema.put("role", new PropertySchema("role", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNodeClassLabel(personLabel, "role")
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode labels = (ArrayNode) array.get(0).path("labels");
Assert.assertEquals(1, labels.size());
JsonNode label = labels.get(0);
Assert.assertEquals(";", label.path("separator").textValue());
}
@Test
public void shouldAddEdgeClassLabelIfSpecifiedInConfig() throws IOException {
Label knowsLabel = new Label("knows",
Collections.singletonList("Person"),
Collections.singletonList("Person"));
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
LabelSchema labelSchema = new LabelSchema(knowsLabel);
labelSchema.put("contact", new PropertySchema("contact", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
edgeSchemas.addLabelSchema(labelSchema, Collections.singletonList("knows-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withEdgeClassLabel(knowsLabel, "contact")
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode labels = (ArrayNode) array.get(0).path("labels");
Assert.assertEquals(1, labels.size());
JsonNode label = labels.get(0);
Assert.assertEquals("edge", label.path("label_type").textValue());
Assert.assertEquals("edge_class_label", label.path("sub_label_type").textValue());
ArrayNode cols = (ArrayNode) label.path("cols");
Assert.assertEquals(3, cols.size());
Assert.assertEquals("~from", cols.get(0).textValue());
Assert.assertEquals("~to", cols.get(1).textValue());
Assert.assertEquals("contact", cols.get(2).textValue());
ArrayNode splitRates = (ArrayNode) label.path("split_rate");
Assert.assertEquals(3, splitRates.size());
Assert.assertEquals(0.7, splitRates.get(0).doubleValue(), 0.0);
Assert.assertEquals(0.1, splitRates.get(1).doubleValue(), 0.0);
Assert.assertEquals(0.2, splitRates.get(2).doubleValue(), 0.0);
ArrayNode edgeType = (ArrayNode) label.path("edge_type");
Assert.assertEquals("Person", edgeType.get(0).textValue());
Assert.assertEquals("knows", edgeType.get(1).textValue());
Assert.assertEquals("Person", edgeType.get(2).textValue());
Assert.assertTrue(label.path("separator").isMissingNode());
}
@Test
public void shouldAddWarningIfColumnDoesNotExistForEdgeClassLabel() throws IOException {
Label knowsLabel = new Label("knows",
Collections.singletonList("Person"),
Collections.singletonList("Person"));
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
LabelSchema labelSchema = new LabelSchema(knowsLabel);
labelSchema.put("contact", new PropertySchema("contact", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
edgeSchemas.addLabelSchema(labelSchema, Collections.singletonList("knows-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withEdgeClassLabel(knowsLabel, "does-not-exist")
.build())
.write();
JsonNode graph = output.graph();
ArrayNode warnings = output.warnings();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
Assert.assertTrue(array.get(0).path("labels").isMissingNode());
Assert.assertEquals(1, warnings.size());
Assert.assertEquals("Unable to add edge class label: Edge of type 'knows' does not contain property 'does-not-exist'.", warnings.get(0).textValue());
}
@Test
public void shouldAddSeparatorIfEdgeClassLabelIsMultiValued() throws IOException {
Label knowsLabel = new Label("knows",
Collections.singletonList("Person"),
Collections.singletonList("Person"));
DataType dataType = DataType.String;
boolean isNullable = false;
boolean isMultiValue = true;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas edgeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.edges);
LabelSchema labelSchema = new LabelSchema(knowsLabel);
labelSchema.put("contact", new PropertySchema("contact", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
edgeSchemas.addLabelSchema(labelSchema, Collections.singletonList("knows-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withEdgeClassLabel(knowsLabel, "contact")
.build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode labels = (ArrayNode) array.get(0).path("labels");
Assert.assertEquals(1, labels.size());
JsonNode label = labels.get(0);
Assert.assertEquals(";", label.path("separator").textValue());
}
}
| 877 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/TrainingDataWriterConfigV1Test.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.NumericalBucketFeatureConfigV1;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
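/**
 * Tests for Range validation and numeric widening in
 * NumericalBucketFeatureConfigV1: non-numeric bounds are rejected, and
 * mixed-type bounds such as (1, 10L) or (0.1, 10) are widened to the
 * broadest numeric type of the pair (Long and Double, respectively).
 */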
public class TrainingDataWriterConfigV1Test {
@Test
public void shouldThrowExceptionIfLowOrHighAreNotNumeric(){
Collection<Collection<?>> values = Arrays.asList(
Arrays.asList(1, "one"),
Arrays.asList("one", 1),
Arrays.asList(true, 1),
Arrays.asList(1, true)
);
for (Collection<?> value : values) {
Iterator<?> iterator = value.iterator();
Object low = iterator.next();
Object high = iterator.next();
try {
new NumericalBucketFeatureConfigV1(
new Label("my-label"),
"column", new Range(low, high), 10, 2);
Assert.fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e){
Assert.assertEquals("Low and high values must be numeric", e.getMessage());
}
}
}
@Test
public void shouldConvertLowOrHighToBroadestType(){
NumericalBucketFeatureConfigV1 config1 = new NumericalBucketFeatureConfigV1(
new Label("my-label"),
"column", new Range(1, 10L), 10, 2);
Assert.assertEquals(Long.class, config1.range().high().getClass());
Assert.assertEquals(Long.class, config1.range().low().getClass());
NumericalBucketFeatureConfigV1 config2 = new NumericalBucketFeatureConfigV1(
new Label("my-label"),
"column", new Range(0.1, 10), 10, 2);
Assert.assertEquals(Double.class, config2.range().high().getClass());
Assert.assertEquals(Double.class, config2.range().low().getClass());
}
}
| 878 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1FeatureOverrideTests.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v1;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureOverrideConfigV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
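/**
 * Tests for feature overrides: a property whose inferred feature type would
 * be numerical can be re-declared as a categorical (or multi-column
 * categorical) feature via FeatureOverrideConfigV1.
 */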
public class PropertyGraphTrainingDataConfigWriterV1FeatureOverrideTests {
@Test
public void shouldOverrideNumericalWithCategoricalFeature() throws IOException {
DataType dataType = DataType.Float;
boolean isNullable = false;
boolean isMultiValue = false;
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
Label label = new Label(Arrays.asList("Person", "Admin"));
LabelSchema labelSchema = new LabelSchema(label);
labelSchema.put("rating", new PropertySchema("rating", isNullable, dataType, isMultiValue, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNodeFeatureOverride(
new FeatureOverrideConfigV1(
label,
Collections.singletonList("rating"),
FeatureTypeV1.category,
null,
new Separator(","))).build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(1, features.size());
JsonNode feature = features.get(0);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("category", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Admin;Person", feature.path("node_type").textValue());
Assert.assertEquals(",", feature.path("separator").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(2, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("rating", cols.get(1).textValue());
Assert.assertTrue(feature.path("norm").isMissingNode());
}
@Test
public void shouldCreateMultiCategoricalFeature() throws IOException {
GraphSchema graphSchema = new GraphSchema();
GraphElementSchemas nodeSchemas = graphSchema.graphElementSchemasFor(GraphElementType.nodes);
Label label = new Label(Arrays.asList("Person", "Admin"));
LabelSchema labelSchema = new LabelSchema(label);
labelSchema.put("rating", new PropertySchema("rating", false, DataType.Float, false, EnumSet.noneOf(DataType.class)));
labelSchema.put("job", new PropertySchema("job", false, DataType.String, false, EnumSet.noneOf(DataType.class)));
labelSchema.put("rank", new PropertySchema("rank", false, DataType.Integer, false, EnumSet.noneOf(DataType.class)));
nodeSchemas.addLabelSchema(labelSchema, Collections.singletonList("person-1.csv"));
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV1(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
PropertyGraphTrainingDataConfigBuilderV1.builder()
.withNodeFeatureOverride(
new FeatureOverrideConfigV1(
label,
Arrays.asList("job", "rank"),
FeatureTypeV1.category,
null,
new Separator(","))).build())
.write();
JsonNode graph = output.graph();
Assert.assertEquals(1, graph.size());
ArrayNode array = (ArrayNode) graph;
ArrayNode features = (ArrayNode) array.get(0).path("features");
Assert.assertEquals(2, features.size());
JsonNode feature = features.get(1);
Assert.assertEquals("node", feature.path("feat_type").textValue());
Assert.assertEquals("category", feature.path("sub_feat_type").textValue());
Assert.assertEquals("Admin;Person", feature.path("node_type").textValue());
ArrayNode cols = (ArrayNode) feature.path("cols");
Assert.assertEquals(3, cols.size());
Assert.assertEquals("~id", cols.get(0).textValue());
Assert.assertEquals("job", cols.get(1).textValue());
Assert.assertEquals("rank", cols.get(2).textValue());
Assert.assertTrue(feature.path("norm").isMissingNode());
Assert.assertTrue(feature.path("separator").isMissingNode());
}
}
| 879 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseBucketCountV1Test.java
|
package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class ParseBucketCountV1Test {
@Test
public void throwsErrorIfInvalidBucketCount() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("num_buckets", "one");
try {
new ParseBucketCountV1(json, new ParsingContext("context")).parseBucketCount();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'num_buckets' field for context. Expected an integer.", e.getMessage());
}
}
}
| 880 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/NoneFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
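// Each tN.json fixture referenced below is expected to carry three top-level
// fields (inferred from runTest at the bottom of this class): "schema" (the
// input graph schema), "feature" (the training data writer config under
// test), and "config" (the expected training data configuration output).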
public class NoneFeatureTest {
@Test
public void shouldAllowSpecifyingNoneFeatureType() throws IOException {
runTest("t1.json");
}
@Test
public void settingFeatureEncodingToNoneResultsInNoneFeatureTypeForAllFeatures() throws IOException {
runTest("t2.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 881 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/EdgeLabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
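// The tN.json fixtures for this class appear to follow the same layout as
// the other v2 tests, except the writer config lives under a "label" field
// rather than "feature" (see runTest at the bottom of this class).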
public class EdgeLabelTest {
@Test
public void shouldCreateLabelForPropertyWithSpecifiedConfigValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldSupplyDefaultSplitRateIfSplitRateNotSpecified() throws IOException {
runTest("t2.json");
}
@Test
public void shouldUseTopLevelDefaultSplitRateIfSpecified() throws IOException {
runTest("t3.json");
}
@Test
public void shouldAllowEmptyPropertyForLinkPrediction() throws IOException {
runTest("t4.json");
}
@Test
public void shouldAllowMissingPropertyForLinkPrediction() throws IOException {
runTest("t5.json");
}
@Test
public void shouldThrowExceptionIfEmptyPropertyAndNotLinkPrediction() throws IOException {
try {
runTest("t6.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Missing or empty 'property' field for edge regression specification (Label: [Person, knows, Person]).", e.getMessage());
}
}
@Test
public void shouldThrowExceptionIfMissingPropertyAndNotLinkPrediction() throws IOException {
try {
runTest("t7.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Missing or empty 'property' field for edge regression specification (Label: [Person, knows, Person]).", e.getMessage());
}
}
@Test
public void shouldThrowExceptionIfUnrecognisedLabelType() throws IOException {
try {
runTest("t8.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Invalid 'type' value for edge label (Label: [Person, knows, Person]): 'invalid'. Valid values are: 'classification', 'regression', 'link_prediction'.", e.getMessage());
}
}
@Test
public void shouldAllowComplexToAndFromLabels() throws IOException {
runTest("t9.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("label"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 882 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/BucketNumericalFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class BucketNumericalFeatureTest {
@Test
public void shouldCreateBucketNumericalFeatureConfigWithSuppliedValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldThrowErrorIfSeparatorSuppliedForBucketNumericalFeature() throws IOException {
try {
runTest("t2.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Invalid 'separator' field for bucket_numerical feature. Bucket numerical feature property cannot contain multiple values.", e.getMessage());
}
}
@Test
public void shouldCreateAutoInferredFeatureIfMultiValueProperty() throws IOException {
runTest("t3.json");
}
@Test
public void shouldThrowErrorIfRangeIsMissing() throws IOException {
try {
runTest("t4.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'range' field for bucket_numerical feature (Label: Person, Property: age). Expected an array with 2 numeric values.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfRangeIsSpecifiedIncorrectly() throws IOException {
try {
runTest("t8.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'range' field for bucket_numerical feature (Label: Person, Property: age). Expected an array with 2 numeric values.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfBucketCountIsMissing() throws IOException {
try {
runTest("t5.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'bucket_cnt' field for bucket_numerical feature (Label: Person, Property: age). Expected an integer.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfBucketCountIsSpecifiedIncorrectly() throws IOException {
try {
runTest("t9.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'bucket_cnt' field for bucket_numerical feature (Label: Person, Property: age). Expected an integer.", e.getMessage());
}
}
@Test
public void shouldSupplyDefaultSlideWindowSizeIfSlideWindowSizeIsMissing() throws IOException {
runTest("t6.json");
}
@Test
public void shouldOmitImputerIfImputerIsMissing() throws IOException {
runTest("t7.json");
}
@Test
public void shouldSupportOldNumBucketsField() throws IOException {
runTest("t10.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 883 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/DatetimeFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class DatetimeFeatureTest {
@Test
public void shouldCreateDatetimeFeatureConfigWithSuppliedValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldCreateDatetimeFeatureConfigWithFewerDatetimePartsValues() throws IOException {
runTest("t2.json");
}
@Test
public void shouldCreateDatetimeFeatureConfigForAllDatetimePartsIfDatetimePartsIsMissing() throws IOException {
runTest("t3.json");
}
@Test
public void shouldCreateDatetimeFeatureConfigForAllDatetimePartsIfDatetimePartsIsEmpty() throws IOException {
runTest("t4.json");
}
@Test
public void shouldThrowErrorIfInvalidDatetimePart() throws IOException {
try {
runTest("t5.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e){
assertEquals("Invalid 'datetime_parts' value for datetime feature (Label: Person, Property: created): 'invalid'. Valid values are: 'hour', 'weekday', 'month', 'year'.", e.getMessage());
}
}
@Test
public void shouldCreateAutoFeatureConfigForMultiValueDateProperty() throws IOException {
runTest("t6.json");
}
@Test
public void shouldAutoInferDatetimeFeatureForDateProperty() throws IOException {
runTest("t7.json");
}
@Test
public void shouldAutoInferAutoFeatureForMultiValueDateProperty() throws IOException {
runTest("t8.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 884 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/PropertyGraphTrainingDataConfigWriterV2FeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
public class PropertyGraphTrainingDataConfigWriterV2FeatureTest {
@Test
public void shouldWriteVersionAndQueryEngine() throws IOException {
runTest("t1.json");
}
@Test
public void shouldAddNodeAndEdgeObjectForEachFile() throws IOException {
runTest("t2.json");
}
@Test
public void shouldAddAutoFeatureForSingleValueStringProperty() throws IOException {
runTest("t3.json");
}
@Test
public void shouldAddAutoFeatureWithSeparatorForMultiValueStringProperty() throws IOException {
runTest("t4.json");
}
@Test
public void shouldAddNumericFeatureWithNormMinMaxAndMedianImputerForSingleValueIntegerProperty() throws IOException {
runTest("t5.json");
}
@Test
public void shouldAddAutoFeatureWithSeparatorAndMedianImputerForMultiValueIntegerProperty() throws IOException {
runTest("t6.json");
}
@Test
public void shouldAddNumericFeatureWithNormMinMaxAndMedianImputerForSingleValueFloatProperty() throws IOException {
runTest("t7.json");
}
@Test
public void shouldAddAutoFeatureWithSeparatorAndMedianImputerForMultiValueFloatProperty() throws IOException {
runTest("t8.json");
}
@Test
public void shouldAddDatetimeFeatureWithAllDatetimePartsForSingleValueDateProperty() throws IOException {
runTest("t9.json");
}
@Test
public void shouldAddAutoFeatureWithSeparatorForMultiValueDateProperty() throws IOException {
runTest("t10.json");
}
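/**
* Unlike the override-driven tests in this package, this harness passes no
* TrainingDataWriterConfigV2, so it exercises the writer's default feature
* inference for each fixture's schema.
*/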
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 885 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/NumericalCategoryAndAutoFeatureOverrideTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
public class NumericalCategoryAndAutoFeatureOverrideTest {
@Test
public void shouldAllowNumericalOverrideAndSupplyDefaultConfigFieldValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldAllowNumericalOverrideAndSupplyDefaultConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t5.json");
}
@Test
public void shouldAllowNumericalOverrideAndUseSpecifiedConfigFieldValues() throws IOException {
runTest("t2.json");
}
@Test
public void shouldAllowNumericalOverrideAndUseSpecifiedConfigFieldValuesIncludingSeparatorForSingleValueProperty() throws IOException {
runTest("t8.json");
}
@Test
public void shouldAllowNumericalOverrideAndUseSpecifiedConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t6.json");
}
@Test
public void shouldAddWarningForOverrideForPropertyThatDoesNotExist() throws IOException {
runTest("t3.json");
}
@Test
public void shouldAllowCategoryOverrideAndSupplyDefaultConfigFieldValues() throws IOException {
runTest("t4.json");
}
@Test
public void shouldAllowCategoryOverrideAndSupplyDefaultConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t7.json");
}
@Test
public void shouldAllowCategoryOverrideAndUseSpecifiedConfigFieldValuesForSingleValueProperty() throws IOException {
runTest("t9.json");
}
@Test
public void shouldAllowCategoryOverrideAndUseSpecifiedConfigFieldValuesForMultiValueProperty() throws IOException {
runTest("t12.json");
}
@Test
public void shouldAllowAutoOverrideForNumericalFeatureWithSuppliedSeparatorAndImputerIgnoringAllOtherConfigValues() throws IOException {
runTest("t10.json");
}
@Test
public void shouldAllowAutoOverrideForNumericalFeatureWithoutImputerIfNotSupplied() throws IOException {
runTest("t11.json");
}
@Test
public void autoOverrideForIntegerMultiValuePropertyWithNoAdditionalConfigValuesShouldProduceAutoFeatureWithSeparatorButNoImputer() throws IOException {
runTest("t13.json");
}
@Test
public void autoOverrideForIntegerSingleValuePropertyWithNoAdditionalConfigValuesShouldProduceAutoFeatureNoOtherConfig() throws IOException {
runTest("t14.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 886 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextFastTextFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class TextFastTextFeatureTest {
@Test
public void shouldCreateFastTextFeatureConfigForNodeWithSuppliedValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldCreateFastTextFeatureConfigForEdgeWithSuppliedValues() throws IOException {
runTest("t2.json");
}
@Test
public void shouldAddWarningForUnknownLanguage() throws IOException {
runTest("t3.json");
}
@Test
public void shouldRaiseErrorIfLanguageMissing() throws IOException {
try {
runTest("t4.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'language' field for text_fasttext feature. Expected one of the following values: 'en', 'zh', 'hi', 'es', 'fr'.", e.getMessage());
}
}
@Test
public void shouldAllowMissingMaxLengthProperty() throws IOException {
runTest("t5.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 887 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextWord2VecFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
public class TextWord2VecFeatureTest {
@Test
public void shouldCreateWord2VecFeatureConfigWithSuppliedValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldUseDefaultLanguageIfLanguageIsMissing() throws IOException {
runTest("t2.json");
}
@Test
public void shouldUseDefaultLanguageIfLanguageIsEmpty() throws IOException {
runTest("t3.json");
}
@Test
public void shouldAddWarningIfUnsupportedLanguageIsSupplied() throws IOException {
runTest("t4.json");
}
@Test
public void shouldSupportOldWord2VecFeatureName() throws IOException {
runTest("t5.json");
}
@Test
public void shouldCreateAutoFeatureConfigForMultiValueProperty() throws IOException {
runTest("t6.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 888 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/NodeLabelTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class NodeLabelTest {
@Test
public void shouldCreateLabelForPropertyWithSpecifiedConfigValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldAddSeparatorForMultiValueProperty() throws IOException {
runTest("t2.json");
}
@Test
public void ifSeparatorIsSpecifiedForMultiValuePropertyThisIsUsedInsteadOfDefaultSeparator() throws IOException {
runTest("t8.json");
}
@Test
public void shouldSupplyDefaultSplitRateIfSplitRateNotSpecified() throws IOException {
runTest("t3.json");
}
@Test
public void shouldUseTopLevelDefaultSplitRateIfSpecified() throws IOException {
runTest("t4.json");
}
@Test
public void shouldUseSpecificSplitRateInPreferenceToTopLevelDefaultSplitRate() throws IOException {
runTest("t10.json");
}
@Test
public void shouldAddWarningIfPropertyDoesNotExist() throws IOException {
runTest("t5.json");
}
@Test
public void shouldThrowExceptionIfUnrecognisedLabelType() throws IOException {
try {
runTest("t6.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Invalid 'type' value for node label (Label: Person, Property: credit): 'unknown'. Valid values are: 'classification', 'regression'.", e.getMessage());
}
}
@Test
public void classificationLabelShouldTakePrecedenceOverFeatureOverride() throws IOException {
runTest("t7.json");
}
@Test
public void shouldThrowAnErrorIfNoPropertySpecified() throws IOException {
try {
runTest("t9.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'property' field for node label (Label: Person). Expected a 'property' field with a string value.", e.getMessage());
}
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("label"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 889 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextTfIdfTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
public class TextTfIdfTest {
@Test
public void shouldCreateTfIdfFeatureConfigWithSuppliedValues() throws IOException {
runTest("t1.json");
}
@Test
public void shouldThrowErrorIfNgramRangeIsMissing() throws IOException {
try {
runTest("t2.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'ngram_range' field for text_tfidf feature (Label: Person, Property: bio). Expected an array with 2 numeric values.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfNgramRangeHasOnlyOneElement() throws IOException {
try {
runTest("t3.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'ngram_range' field for text_tfidf feature (Label: Person, Property: bio). Expected an array with 2 numeric values.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfNgramRangeHasMoreThanTwoElements() throws IOException {
try {
runTest("t4.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'ngram_range' field for text_tfidf feature (Label: Person, Property: bio). Expected an array with 2 numeric values.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfNgramRangeHasNonNumericElements() throws IOException {
try {
runTest("t5.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'ngram_range' field for text_tfidf feature (Label: Person, Property: bio). Expected an array with 2 numeric values.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfMinDfIsMissing() throws IOException {
try {
runTest("t6.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'min_df' field for text_tfidf feature (Label: Person, Property: bio). Expected an integer value.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfMinDfIsNonNumeric() throws IOException {
try {
runTest("t7.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'min_df' field for text_tfidf feature (Label: Person, Property: bio). Expected an integer value.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfMaxFeaturesIsMissing() throws IOException {
try {
runTest("t8.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'max_features' field for text_tfidf feature (Label: Person, Property: bio). Expected an integer value.", e.getMessage());
}
}
@Test
public void shouldThrowErrorIfMaxFeaturesIsNonNumeric() throws IOException {
try {
runTest("t9.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'max_features' field for text_tfidf feature (Label: Person, Property: bio). Expected an integer value.", e.getMessage());
}
}
@Test
public void shouldCreateAutoFeatureConfigForMultiValueProperty() throws IOException {
runTest("t10.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 890 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/TextSbertFeatureTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.Output;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class TextSbertFeatureTest {
@Test
public void shouldCreateSbertTextFeatureConfigForNodeWithDefaultTextSBertType() throws IOException {
runTest("t1.json");
}
@Test
public void shouldCreateSbertTextFeatureConfigForNodeWithSBert128Type() throws IOException {
runTest("t2.json");
}
@Test
public void shouldCreateSbertTextFeatureConfigForNodeWithSBert512Type() throws IOException {
runTest("t3.json");
}
@Test
public void shouldThrowErrorForUnrecognizedSBertType() throws IOException {
try {
runTest("t4.json");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Illegal feature type: 'text_sbertUnknown'. Supported values are: 'bucket_numerical', 'text_word2vec', 'text_fasttext', 'text_sbert', 'text_sbert128', 'text_sbert512', 'category', 'numerical', 'text_tfidf', 'datetime', 'auto', 'none'.", e.getMessage());
}
}
@Test
public void shouldCreateSbertTextFeatureConfigForEdgeWithDefaultTextSBertType() throws IOException {
runTest("t5.json");
}
@Test
public void shouldCreateSbertTextFeatureConfigForEdgeWithSBert128Type() throws IOException {
runTest("t6.json");
}
@Test
public void shouldCreateSbertTextFeatureConfigForEdgeWithSBert512Type() throws IOException {
runTest("t7.json");
}
private void runTest(String jsonFile) throws IOException {
JsonNode json = JsonFromResource.get(jsonFile, getClass());
GraphSchema graphSchema = GraphSchema.fromJson(json.get("schema"));
JsonNode expectedTrainingDataConfig = json.get("config");
Collection<TrainingDataWriterConfigV2> overrideConfig = TrainingDataWriterConfigV2.fromJson(json.get("feature"), NeptuneMLSourceDataModel.PropertyGraph);
Output output = new Output();
new PropertyGraphTrainingDataConfigWriterV2(
graphSchema,
output.generator(),
PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE,
PrinterOptions.NULL_OPTIONS,
overrideConfig.iterator().next()).write();
assertEquals(Output.format(expectedTrainingDataConfig), Output.format(output.allOutput()));
}
}
| 891 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TrainingDataWriterConfigV2Test.java
|
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.JsonFromResource;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.junit.Assert.*;
public class TrainingDataWriterConfigV2Test {
@Test
public void shouldCreateSingleConfig() throws IOException {
JsonNode json = JsonFromResource.get("t1.json", getClass());
Collection<TrainingDataWriterConfigV2> config = TrainingDataWriterConfigV2.fromJson(json.path("neptune_ml"), NeptuneMLSourceDataModel.PropertyGraph);
assertEquals(1, config.size());
}
@Test
public void shouldCreateConfigForEachElementInArray() throws IOException {
JsonNode json = JsonFromResource.get("t2.json", getClass());
Collection<TrainingDataWriterConfigV2> config = TrainingDataWriterConfigV2.fromJson(json.path("neptune_ml"), NeptuneMLSourceDataModel.PropertyGraph);
assertEquals(3, config.size());
}
@Test
public void shouldCreateConfigForEachElementInJobsArray() throws IOException {
JsonNode json = JsonFromResource.get("t3.json", getClass());
Collection<TrainingDataWriterConfigV2> config = TrainingDataWriterConfigV2.fromJson(json.path("neptune_ml"), NeptuneMLSourceDataModel.PropertyGraph);
assertEquals(5, config.size());
}
}
| 892 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseTaskTypeV2Test.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
public class ParseTaskTypeV2Test {
@Test
public void throwsErrorIfInvalidTaskType(){
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("task_type", "invalid");
try {
new ParseTaskTypeV2(json, new ParsingContext("context")).parseTaskType();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Invalid 'task_type' value for context: 'invalid'. Valid values are: 'link_prediction', 'node_classification', 'node_regression', 'edge_classification', 'edge_regression'.", e.getMessage());
}
}
@Test
public void throwsErrorIfMissingTaskType(){
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("not_a_task_type", "a_value");
try {
new ParseTaskTypeV2(json, new ParsingContext("context")).parseTaskType();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'task_type' field for context. Expected one of the following values: 'link_prediction', 'node_classification', 'node_regression', 'edge_classification', 'edge_regression'.", e.getMessage());
}
}
}
| 893 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseImputerTypeV2Test.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.ImputerTypeV2;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
public class ParseImputerTypeV2Test {
@Test
public void throwsErrorIfInvalidImputer(){
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("imputer", "invalid");
try {
new ParseImputerTypeV2(json, new ParsingContext("context")).parseImputerType();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Invalid 'imputer' value for context: 'invalid'. Valid values are: 'mean', 'median', 'most-frequent'.", e.getMessage());
}
}
@Test
public void returnsNoneIfImputerMissing(){
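// No "imputer" field is present at all: the parser falls back to ImputerTypeV2.none instead of throwing.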
ObjectNode json = JsonNodeFactory.instance.objectNode();
ImputerTypeV2 imputerType = new ParseImputerTypeV2(json, new ParsingContext("context")).parseImputerType();
assertEquals(ImputerTypeV2.none, imputerType);
}
}
| 894 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseNormTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class ParseNormTest {
@Test
public void throwsErrorIfInvalidNorm() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("norm", "invalid");
try {
new ParseNorm(json, new ParsingContext("node feature").withLabel(new Label("Person")).withProperty("age")).parseNorm();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Invalid 'norm' value for node feature (Label: Person, Property: age): 'invalid'. Valid values are: 'none', 'min-max', 'standard'.", e.getMessage());
}
}
}
| 895 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseNodeTypeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
public class ParseNodeTypeTest {
@Test
public void canParseSimpleLabel() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("node", "Person");
Label label = new ParseNodeType(json, new ParsingContext("node")).parseNodeType();
assertEquals("Person", label.fullyQualifiedLabel());
}
@Test
public void canParseMultiLabel() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
arrayNode.add("Person");
arrayNode.add("Admin");
json.set("node", arrayNode);
Label label = new ParseNodeType(json, new ParsingContext("node")).parseNodeType();
assertEquals("Admin;Person", label.fullyQualifiedLabel());
}
@Test
public void canParseSemicolonSeparatedMultiLabel() {
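// A semicolon-separated multi-label is split and normalized to sorted order: "Person;Admin" -> "Admin;Person".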
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("node", "Person;Admin");
Label label = new ParseNodeType(json, new ParsingContext("node")).parseNodeType();
assertEquals("Admin;Person", label.fullyQualifiedLabel());
}
@Test
public void throwsErrorIfNodeFieldIsMissing() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
try {
new ParseNodeType(json, new ParsingContext("node")).parseNodeType();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'node' field for node. Expected a text value or array of text values.", e.getMessage());
}
}
@Test
public void throwsErrorIfNodeFieldIsNotText() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
json.put("node", 1);
try {
new ParseNodeType(json, new ParsingContext("node")).parseNodeType();
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertEquals("Error parsing 'node' field for node. Expected a text value or array of text values.", e.getMessage());
}
}
}
| 896 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseRangeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class ParseRangeTest {
@Test
public void shouldParseRangeFromJson() {
ObjectNode root = JsonNodeFactory.instance.objectNode();
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
arrayNode.add(1);
arrayNode.add(10L);
root.set("range", arrayNode);
ParseRange parseRange = new ParseRange(root, "range", new ParsingContext("desc"));
Range range = parseRange.parseRange();
assertEquals(1L, range.low());
assertEquals(10L, range.high());
}
@Test
public void shouldParseRangeFromJsonWithHighLowSwitched() {
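// Bounds arrive high-first; ParseRange is expected to normalize them so that low() <= high().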
ObjectNode root = JsonNodeFactory.instance.objectNode();
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
arrayNode.add(10L);
arrayNode.add(1);
root.set("range", arrayNode);
ParseRange parseRange = new ParseRange(root, "range", new ParsingContext("desc"));
Range range = parseRange.parseRange();
assertEquals(1L, range.low());
assertEquals(10L, range.high());
}
}
| 897 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseEdgeTypeTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import static org.junit.Assert.*;
public class ParseEdgeTypeTest {
@Test
public void shouldParseEdge() throws JsonProcessingException {
String json = "{ \"edge\": [\"person\", \"wrote\", \"post\"]}";
JsonNode jsonNode = new ObjectMapper().readTree(json);
ParseEdgeType parseEdgeType = new ParseEdgeType(jsonNode, new ParsingContext("DESC"));
Label label = parseEdgeType.parseEdgeType();
assertEquals("person", label.fromLabelsAsString());
assertEquals("wrote", label.labelsAsString());
assertEquals("post", label.toLabelsAsString());
}
@Test
public void shouldParseEdgeWithSemicolons() throws JsonProcessingException {
String json = "{ \"edge\": [\"person;admin\", \"wrote\", \"post;content\"]}";
JsonNode jsonNode = new ObjectMapper().readTree(json);
ParseEdgeType parseEdgeType = new ParseEdgeType(jsonNode, new ParsingContext("DESC"));
Label label = parseEdgeType.parseEdgeType();
assertEquals("admin;person", label.fromLabelsAsString());
assertEquals("wrote", label.labelsAsString());
assertEquals("content;post", label.toLabelsAsString());
}
@Test
public void shouldEscapeSemicolons() throws JsonProcessingException {
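// The four backslashes in the Java literal collapse to "\;" in the parsed JSON value: an escaped
// semicolon, so "admin\;aa" stays a single label rather than splitting into two.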
String json = "{ \"edge\": [\"person;admin\\\\;aa\", \"wrote;x\", \"post;content\"]}";
JsonNode jsonNode = new ObjectMapper().readTree(json);
ParseEdgeType parseEdgeType = new ParseEdgeType(jsonNode, new ParsingContext("DESC"));
Label label = parseEdgeType.parseEdgeType();
assertEquals("admin\\;aa;person", label.fromLabelsAsString());
assertEquals("wrote\\;x", label.labelsAsString());
assertEquals("content;post", label.toLabelsAsString());
}
@Test
public void canParseSimpleEdge() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
arrayNode.add("Person");
arrayNode.add("wrote");
arrayNode.add("Post");
json.set("edge", arrayNode);
Label label = new ParseEdgeType(json, new ParsingContext("edge")).parseEdgeType();
assertEquals("(Person)-wrote-(Post)", label.fullyQualifiedLabel());
}
@Test
public void canParseComplexEdge() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
arrayNode.add(arrayFrom("Person", "Admin"));
arrayNode.add("wrote");
arrayNode.add(arrayFrom("Post", "Content"));
json.set("edge", arrayNode);
Label label = new ParseEdgeType(json, new ParsingContext("edge")).parseEdgeType();
assertEquals("(Admin;Person)-wrote-(Content;Post)", label.fullyQualifiedLabel());
}
@Test
public void canParseEdgeWithOneComplexLabel() {
ObjectNode json = JsonNodeFactory.instance.objectNode();
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
arrayNode.add("Person"); // Simple
arrayNode.add("wrote");
arrayNode.add(arrayFrom("Post", "Content")); // Complex
json.set("edge", arrayNode);
Label label = new ParseEdgeType(json, new ParsingContext("edge")).parseEdgeType();
assertEquals("(Person)-wrote-(Content;Post)", label.fullyQualifiedLabel());
}
private ArrayNode arrayFrom(String... values){
ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
for (String value : values) {
arrayNode.add(value);
}
return arrayNode;
}
}
| 898 |
0 |
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune
|
Create_ds/neptune-export/src/test/java/com/amazonaws/services/neptune/export/ParamConverterTest.java
|
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package com.amazonaws.services.neptune.export;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import static org.junit.Assert.*;
public class ParamConverterTest {
@Test
public void shouldConvertCamelCaseToDashDelimitedLowerCase(){
assertEquals("my-long-args", ParamConverter.toCliArg("myLongArgs"));
}
@Test
public void shouldSingularizeValue(){
assertEquals("endpoint", ParamConverter.singularize("endpoints"));
assertEquals("name", ParamConverter.singularize("name"));
assertEquals("query", ParamConverter.singularize("queries"));
}
@Test
public void shouldConvertParams() throws JsonProcessingException {
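// Demonstrates three conversions in one round trip: camelCase keys become kebab-case CLI flags,
// array values are repeated one flag per element, and boolean true values become bare flags.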
String json = "{\n" +
" \"endpoints\": [\"endpoint1\", \"endpoint2\"],\n" +
" \"profile\": \"neptune_ml\",\n" +
" \"useIamAuth\": true,\n" +
" \"cloneCluster\": true,\n" +
" \"cloneClusterReplicaCount\": 2\n" +
" }";
JsonNode jsonNode = new ObjectMapper().readTree(json);
Args args = ParamConverter.fromJson("export-pg", jsonNode);
assertEquals("export-pg --endpoint 'endpoint1' --endpoint 'endpoint2' --profile 'neptune_ml' --use-iam-auth --clone-cluster --clone-cluster-replica-count 2", args.toString());
}
}
| 899 |