index (int64) | repo_id (string, 9-205 chars) | file_path (string, 31-246 chars) | content (string, 1-12.2M chars) | __index_level_0__ (int64, 0-10k)
---|---|---|---|---|
0 |
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys
|
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys/model/OpaqueError.java
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Do not modify this file. This file is machine generated, and any changes to it will be overwritten.
package software.amazon.cryptography.materialproviderstestvectorkeys.model;
public class OpaqueError extends RuntimeException {
/**
* The unexpected object encountered. It MIGHT BE an Exception, but that is not guaranteed.
*/
private final Object obj;
protected OpaqueError(BuilderImpl builder) {
super(messageFromBuilder(builder), builder.cause());
this.obj = builder.obj();
}
private static String messageFromBuilder(Builder builder) {
if (builder.message() != null) {
return builder.message();
}
if (builder.cause() != null) {
return builder.cause().getMessage();
}
return null;
}
/**
* See {@link Throwable#getMessage()}.
*/
public String message() {
return this.getMessage();
}
/**
* See {@link Throwable#getCause()}.
*/
public Throwable cause() {
return this.getCause();
}
/**
* @return The unexpected object encountered. It MIGHT BE an Exception, but that is not guaranteed.
*/
public Object obj() {
return this.obj;
}
public Builder toBuilder() {
return new BuilderImpl(this);
}
public static Builder builder() {
return new BuilderImpl();
}
public interface Builder {
/**
* @param message The detailed message. The detail message is saved for later retrieval by the {@link #getMessage()} method.
*/
Builder message(String message);
/**
* @return The detailed message. The detail message is saved for later retrieval by the {@link #getMessage()} method.
*/
String message();
/**
* @param cause The cause (which is saved for later retrieval by the {@link #getCause()} method). (A {@code null} value is permitted, and indicates that the cause is nonexistent or unknown.)
*/
Builder cause(Throwable cause);
/**
* @return The cause (which is saved for later retrieval by the {@link #getCause()} method). (A {@code null} value is permitted, and indicates that the cause is nonexistent or unknown.)
*/
Throwable cause();
/**
* @param obj The unexpected object encountered. It MIGHT BE an Exception, but that is not guaranteed.
*/
Builder obj(Object obj);
/**
* @return The unexpected object encountered. It MIGHT BE an Exception, but that is not guaranteed.
*/
Object obj();
OpaqueError build();
}
static class BuilderImpl implements Builder {
protected String message;
protected Throwable cause;
protected Object obj;
protected BuilderImpl() {
}
protected BuilderImpl(OpaqueError model) {
this.cause = model.getCause();
this.message = model.getMessage();
this.obj = model.obj();
}
public Builder message(String message) {
this.message = message;
return this;
}
public String message() {
return this.message;
}
public Builder cause(Throwable cause) {
this.cause = cause;
return this;
}
public Throwable cause() {
return this.cause;
}
public Builder obj(Object obj) {
this.obj = obj;
return this;
}
public Object obj() {
return this.obj;
}
public OpaqueError build() {
if (this.obj != null && this.cause == null && this.obj instanceof Throwable) {
this.cause = (Throwable) this.obj;
} else if (this.obj == null && this.cause != null) {
this.obj = this.cause;
}
return new OpaqueError(this);
}
}
}
| 3,200 |
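The generated OpaqueError above bridges an arbitrary wrapped object and the standard Throwable cause in build(): a Throwable supplied as obj is promoted to the cause, and a lone cause is mirrored back into obj. A minimal sketch of that behavior, assuming a hypothetical caller class (`OpaqueErrorExample` is not part of the generated code):

```java
import software.amazon.cryptography.materialproviderstestvectorkeys.model.OpaqueError;

public class OpaqueErrorExample {
    public static void main(String[] args) {
        RuntimeException underlying = new RuntimeException("unexpected failure");

        // obj is a Throwable and cause is unset, so build() promotes obj to the cause,
        // and the message falls back to the cause's message.
        OpaqueError fromObj = OpaqueError.builder()
                .obj(underlying)
                .build();
        System.out.println(fromObj.getMessage());           // "unexpected failure"
        System.out.println(fromObj.cause() == underlying);  // true

        // Conversely, when only a cause is supplied, build() mirrors it into obj.
        OpaqueError fromCause = OpaqueError.builder()
                .cause(underlying)
                .build();
        System.out.println(fromCause.obj() == underlying);  // true
    }
}
```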
0 |
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys
|
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys/model/SerializeKeyDescriptionInput.java
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Do not modify this file. This file is machine generated, and any changes to it will be overwritten.
package software.amazon.cryptography.materialproviderstestvectorkeys.model;
import java.util.Objects;
public class SerializeKeyDescriptionInput {
private final KeyDescription keyDescription;
protected SerializeKeyDescriptionInput(BuilderImpl builder) {
this.keyDescription = builder.keyDescription();
}
public KeyDescription keyDescription() {
return this.keyDescription;
}
public Builder toBuilder() {
return new BuilderImpl(this);
}
public static Builder builder() {
return new BuilderImpl();
}
public interface Builder {
Builder keyDescription(KeyDescription keyDescription);
KeyDescription keyDescription();
SerializeKeyDescriptionInput build();
}
static class BuilderImpl implements Builder {
protected KeyDescription keyDescription;
protected BuilderImpl() {
}
protected BuilderImpl(SerializeKeyDescriptionInput model) {
this.keyDescription = model.keyDescription();
}
public Builder keyDescription(KeyDescription keyDescription) {
this.keyDescription = keyDescription;
return this;
}
public KeyDescription keyDescription() {
return this.keyDescription;
}
public SerializeKeyDescriptionInput build() {
if (Objects.isNull(this.keyDescription())) {
throw new IllegalArgumentException("Missing value for required field `keyDescription`");
}
return new SerializeKeyDescriptionInput(this);
}
}
}
| 3,201 |
0 |
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys
|
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys/model/GetKeyDescriptionInput.java
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Do not modify this file. This file is machine generated, and any changes to it will be overwritten.
package software.amazon.cryptography.materialproviderstestvectorkeys.model;
import java.nio.ByteBuffer;
import java.util.Objects;
public class GetKeyDescriptionInput {
private final ByteBuffer json;
protected GetKeyDescriptionInput(BuilderImpl builder) {
this.json = builder.json();
}
public ByteBuffer json() {
return this.json;
}
public Builder toBuilder() {
return new BuilderImpl(this);
}
public static Builder builder() {
return new BuilderImpl();
}
public interface Builder {
Builder json(ByteBuffer json);
ByteBuffer json();
GetKeyDescriptionInput build();
}
static class BuilderImpl implements Builder {
protected ByteBuffer json;
protected BuilderImpl() {
}
protected BuilderImpl(GetKeyDescriptionInput model) {
this.json = model.json();
}
public Builder json(ByteBuffer json) {
this.json = json;
return this;
}
public ByteBuffer json() {
return this.json;
}
public GetKeyDescriptionInput build() {
if (Objects.isNull(this.json())) {
throw new IllegalArgumentException("Missing value for required field `json`");
}
return new GetKeyDescriptionInput(this);
}
}
}
| 3,202 |
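GetKeyDescriptionInput (and the near-identical SerializeKeyDescriptionInput above) follows the same generated builder pattern, with required-field validation deferred to build(). A small sketch, assuming a hypothetical wrapper class and a placeholder JSON payload; the real key-description document shape is defined elsewhere in the test vectors:

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import software.amazon.cryptography.materialproviderstestvectorkeys.model.GetKeyDescriptionInput;

public class GetKeyDescriptionInputExample {
    public static void main(String[] args) {
        // Placeholder payload; the actual schema is not shown in this file.
        ByteBuffer json = ByteBuffer.wrap(
                "{\"type\": \"placeholder\"}".getBytes(StandardCharsets.UTF_8));

        GetKeyDescriptionInput input = GetKeyDescriptionInput.builder()
                .json(json)
                .build();
        System.out.println(input.json().remaining()); // bytes in the supplied buffer

        // Omitting the required field fails fast at build() rather than at call time.
        try {
            GetKeyDescriptionInput.builder().build();
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Missing value for required field `json`
        }
    }
}
```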
0 |
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys
|
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviderstestvectorkeys/model/CollectionOfErrors.java
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Do not modify this file. This file is machine generated, and any changes to it will be overwritten.
package software.amazon.cryptography.materialproviderstestvectorkeys.model;
import java.util.List;
public class CollectionOfErrors extends RuntimeException {
/**
* The list of Exceptions encountered.
*/
private final List<RuntimeException> list;
protected CollectionOfErrors(BuilderImpl builder) {
super(messageFromBuilder(builder), builder.cause());
this.list = builder.list();
}
private static String messageFromBuilder(Builder builder) {
if (builder.message() != null) {
return builder.message();
}
if (builder.cause() != null) {
return builder.cause().getMessage();
}
return null;
}
/**
* See {@link Throwable#getMessage()}.
*/
public String message() {
return this.getMessage();
}
/**
* See {@link Throwable#getCause()}.
*/
public Throwable cause() {
return this.getCause();
}
/**
* @return The list of Exceptions encountered.
*/
public List<RuntimeException> list() {
return this.list;
}
public Builder toBuilder() {
return new BuilderImpl(this);
}
public static Builder builder() {
return new BuilderImpl();
}
public interface Builder {
/**
* @param message The detailed message. The detail message is saved for later retrieval by the {@link #getMessage()} method.
*/
Builder message(String message);
/**
* @return The detailed message. The detail message is saved for later retrieval by the {@link #getMessage()} method.
*/
String message();
/**
* @param cause The cause (which is saved for later retrieval by the {@link #getCause()} method). (A {@code null} value is permitted, and indicates that the cause is nonexistent or unknown.)
*/
Builder cause(Throwable cause);
/**
* @return The cause (which is saved for later retrieval by the {@link #getCause()} method). (A {@code null} value is permitted, and indicates that the cause is nonexistent or unknown.)
*/
Throwable cause();
/**
* @param list The list of Exceptions encountered.
*/
Builder list(List<RuntimeException> list);
/**
* @return The list of Exceptions encountered.
*/
List<RuntimeException> list();
CollectionOfErrors build();
}
static class BuilderImpl implements Builder {
protected String message;
protected Throwable cause;
protected List<RuntimeException> list;
protected BuilderImpl() {
}
protected BuilderImpl(CollectionOfErrors model) {
this.cause = model.getCause();
this.message = model.getMessage();
this.list = model.list();
}
public Builder message(String message) {
this.message = message;
return this;
}
public String message() {
return this.message;
}
public Builder cause(Throwable cause) {
this.cause = cause;
return this;
}
public Throwable cause() {
return this.cause;
}
public Builder list(List<RuntimeException> list) {
this.list = list;
return this;
}
public List<RuntimeException> list() {
return this.list;
}
public CollectionOfErrors build() {
return new CollectionOfErrors(this);
}
}
}
| 3,203 |
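CollectionOfErrors aggregates several RuntimeExceptions behind a single exception; per messageFromBuilder, an explicit builder message takes precedence over the cause's message. A brief sketch with a hypothetical wrapper class:

```java
import java.util.Arrays;
import software.amazon.cryptography.materialproviderstestvectorkeys.model.CollectionOfErrors;

public class CollectionOfErrorsExample {
    public static void main(String[] args) {
        CollectionOfErrors collected = CollectionOfErrors.builder()
                .message("two keyrings failed")        // explicit message wins over any cause
                .list(Arrays.asList(
                        new RuntimeException("first failure"),
                        new RuntimeException("second failure")))
                .build();

        System.out.println(collected.getMessage());    // "two keyrings failed"
        System.out.println(collected.list().size());   // 2
    }
}
```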
0 |
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviders
|
Create_ds/aws-cryptographic-material-providers-library-java/TestVectorsAwsCryptographicMaterialProviders/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/materialproviders/wrapped/TestMaterialProviders.java
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Do not modify this file. This file is machine generated, and any changes to it will be overwritten.
package software.amazon.cryptography.materialproviders.wrapped;
import Wrappers_Compile.Result;
import dafny.DafnySequence;
import dafny.Tuple0;
import java.lang.Byte;
import java.lang.IllegalArgumentException;
import java.lang.RuntimeException;
import java.nio.ByteBuffer;
import java.util.Objects;
import software.amazon.cryptography.materialproviders.MaterialProviders;
import software.amazon.cryptography.materialproviders.ToDafny;
import software.amazon.cryptography.materialproviders.ToNative;
import software.amazon.cryptography.materialproviders.internaldafny.types.AlgorithmSuiteInfo;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsDiscoveryKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsDiscoveryMultiKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsHierarchicalKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsMrkDiscoveryKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsMrkDiscoveryMultiKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsMrkKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsMrkMultiKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsMultiKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateAwsKmsRsaKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateCryptographicMaterialsCacheInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateDefaultClientSupplierInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateDefaultCryptographicMaterialsManagerInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateMultiKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateRawAesKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateRawRsaKeyringInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.CreateRequiredEncryptionContextCMMInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.DecryptionMaterials;
import software.amazon.cryptography.materialproviders.internaldafny.types.EncryptionMaterials;
import software.amazon.cryptography.materialproviders.internaldafny.types.Error;
import software.amazon.cryptography.materialproviders.internaldafny.types.IAwsCryptographicMaterialProvidersClient;
import software.amazon.cryptography.materialproviders.internaldafny.types.IClientSupplier;
import software.amazon.cryptography.materialproviders.internaldafny.types.ICryptographicMaterialsCache;
import software.amazon.cryptography.materialproviders.internaldafny.types.ICryptographicMaterialsManager;
import software.amazon.cryptography.materialproviders.internaldafny.types.IKeyring;
import software.amazon.cryptography.materialproviders.internaldafny.types.InitializeDecryptionMaterialsInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.InitializeEncryptionMaterialsInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.ValidDecryptionMaterialsTransitionInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.ValidEncryptionMaterialsTransitionInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.ValidateCommitmentPolicyOnDecryptInput;
import software.amazon.cryptography.materialproviders.internaldafny.types.ValidateCommitmentPolicyOnEncryptInput;
public class TestMaterialProviders implements IAwsCryptographicMaterialProvidersClient {
private final MaterialProviders _impl;
protected TestMaterialProviders(BuilderImpl builder) {
this._impl = builder.impl();
}
public static Builder builder() {
return new BuilderImpl();
}
public Result<IKeyring, Error> CreateAwsKmsDiscoveryKeyring(
CreateAwsKmsDiscoveryKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsDiscoveryKeyringInput nativeInput = ToNative.CreateAwsKmsDiscoveryKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsDiscoveryKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsDiscoveryMultiKeyring(
CreateAwsKmsDiscoveryMultiKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsDiscoveryMultiKeyringInput nativeInput = ToNative.CreateAwsKmsDiscoveryMultiKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsDiscoveryMultiKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsHierarchicalKeyring(
CreateAwsKmsHierarchicalKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsHierarchicalKeyringInput nativeInput = ToNative.CreateAwsKmsHierarchicalKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsHierarchicalKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsKeyring(CreateAwsKmsKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsKeyringInput nativeInput = ToNative.CreateAwsKmsKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsMrkDiscoveryKeyring(
CreateAwsKmsMrkDiscoveryKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsMrkDiscoveryKeyringInput nativeInput = ToNative.CreateAwsKmsMrkDiscoveryKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsMrkDiscoveryKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsMrkDiscoveryMultiKeyring(
CreateAwsKmsMrkDiscoveryMultiKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsMrkDiscoveryMultiKeyringInput nativeInput = ToNative.CreateAwsKmsMrkDiscoveryMultiKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsMrkDiscoveryMultiKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsMrkKeyring(CreateAwsKmsMrkKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsMrkKeyringInput nativeInput = ToNative.CreateAwsKmsMrkKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsMrkKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsMrkMultiKeyring(
CreateAwsKmsMrkMultiKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsMrkMultiKeyringInput nativeInput = ToNative.CreateAwsKmsMrkMultiKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsMrkMultiKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsMultiKeyring(
CreateAwsKmsMultiKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsMultiKeyringInput nativeInput = ToNative.CreateAwsKmsMultiKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsMultiKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateAwsKmsRsaKeyring(CreateAwsKmsRsaKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateAwsKmsRsaKeyringInput nativeInput = ToNative.CreateAwsKmsRsaKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateAwsKmsRsaKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<ICryptographicMaterialsCache, Error> CreateCryptographicMaterialsCache(
CreateCryptographicMaterialsCacheInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateCryptographicMaterialsCacheInput nativeInput = ToNative.CreateCryptographicMaterialsCacheInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.ICryptographicMaterialsCache nativeOutput = this._impl.CreateCryptographicMaterialsCache(nativeInput);
ICryptographicMaterialsCache dafnyOutput = ToDafny.CryptographicMaterialsCache(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IClientSupplier, Error> CreateDefaultClientSupplier(
CreateDefaultClientSupplierInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateDefaultClientSupplierInput nativeInput = ToNative.CreateDefaultClientSupplierInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IClientSupplier nativeOutput = this._impl.CreateDefaultClientSupplier(nativeInput);
IClientSupplier dafnyOutput = ToDafny.ClientSupplier(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<ICryptographicMaterialsManager, Error> CreateDefaultCryptographicMaterialsManager(
CreateDefaultCryptographicMaterialsManagerInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateDefaultCryptographicMaterialsManagerInput nativeInput = ToNative.CreateDefaultCryptographicMaterialsManagerInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.ICryptographicMaterialsManager nativeOutput = this._impl.CreateDefaultCryptographicMaterialsManager(nativeInput);
ICryptographicMaterialsManager dafnyOutput = ToDafny.CryptographicMaterialsManager(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateMultiKeyring(CreateMultiKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateMultiKeyringInput nativeInput = ToNative.CreateMultiKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateMultiKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateRawAesKeyring(CreateRawAesKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateRawAesKeyringInput nativeInput = ToNative.CreateRawAesKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateRawAesKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<IKeyring, Error> CreateRawRsaKeyring(CreateRawRsaKeyringInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateRawRsaKeyringInput nativeInput = ToNative.CreateRawRsaKeyringInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.IKeyring nativeOutput = this._impl.CreateRawRsaKeyring(nativeInput);
IKeyring dafnyOutput = ToDafny.Keyring(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<ICryptographicMaterialsManager, Error> CreateRequiredEncryptionContextCMM(
CreateRequiredEncryptionContextCMMInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.CreateRequiredEncryptionContextCMMInput nativeInput = ToNative.CreateRequiredEncryptionContextCMMInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.ICryptographicMaterialsManager nativeOutput = this._impl.CreateRequiredEncryptionContextCMM(nativeInput);
ICryptographicMaterialsManager dafnyOutput = ToDafny.CryptographicMaterialsManager(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> DecryptionMaterialsWithPlaintextDataKey(
DecryptionMaterials dafnyInput) {
software.amazon.cryptography.materialproviders.model.DecryptionMaterials nativeInput = ToNative.DecryptionMaterials(dafnyInput);
try {
this._impl.DecryptionMaterialsWithPlaintextDataKey(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> EncryptionMaterialsHasPlaintextDataKey(
EncryptionMaterials dafnyInput) {
software.amazon.cryptography.materialproviders.model.EncryptionMaterials nativeInput = ToNative.EncryptionMaterials(dafnyInput);
try {
this._impl.EncryptionMaterialsHasPlaintextDataKey(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<AlgorithmSuiteInfo, Error> GetAlgorithmSuiteInfo(
DafnySequence<? extends Byte> dafnyInput) {
ByteBuffer nativeInput = software.amazon.smithy.dafny.conversion.ToNative.Simple.ByteBuffer(dafnyInput);
try {
software.amazon.cryptography.materialproviders.model.AlgorithmSuiteInfo nativeOutput = this._impl.GetAlgorithmSuiteInfo(nativeInput);
AlgorithmSuiteInfo dafnyOutput = ToDafny.AlgorithmSuiteInfo(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<DecryptionMaterials, Error> InitializeDecryptionMaterials(
InitializeDecryptionMaterialsInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.InitializeDecryptionMaterialsInput nativeInput = ToNative.InitializeDecryptionMaterialsInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.model.DecryptionMaterials nativeOutput = this._impl.InitializeDecryptionMaterials(nativeInput);
DecryptionMaterials dafnyOutput = ToDafny.DecryptionMaterials(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<EncryptionMaterials, Error> InitializeEncryptionMaterials(
InitializeEncryptionMaterialsInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.InitializeEncryptionMaterialsInput nativeInput = ToNative.InitializeEncryptionMaterialsInput(dafnyInput);
try {
software.amazon.cryptography.materialproviders.model.EncryptionMaterials nativeOutput = this._impl.InitializeEncryptionMaterials(nativeInput);
EncryptionMaterials dafnyOutput = ToDafny.EncryptionMaterials(nativeOutput);
return Result.create_Success(dafnyOutput);
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> ValidAlgorithmSuiteInfo(AlgorithmSuiteInfo dafnyInput) {
software.amazon.cryptography.materialproviders.model.AlgorithmSuiteInfo nativeInput = ToNative.AlgorithmSuiteInfo(dafnyInput);
try {
this._impl.ValidAlgorithmSuiteInfo(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> ValidateCommitmentPolicyOnDecrypt(
ValidateCommitmentPolicyOnDecryptInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.ValidateCommitmentPolicyOnDecryptInput nativeInput = ToNative.ValidateCommitmentPolicyOnDecryptInput(dafnyInput);
try {
this._impl.ValidateCommitmentPolicyOnDecrypt(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> ValidateCommitmentPolicyOnEncrypt(
ValidateCommitmentPolicyOnEncryptInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.ValidateCommitmentPolicyOnEncryptInput nativeInput = ToNative.ValidateCommitmentPolicyOnEncryptInput(dafnyInput);
try {
this._impl.ValidateCommitmentPolicyOnEncrypt(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> ValidDecryptionMaterialsTransition(
ValidDecryptionMaterialsTransitionInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.ValidDecryptionMaterialsTransitionInput nativeInput = ToNative.ValidDecryptionMaterialsTransitionInput(dafnyInput);
try {
this._impl.ValidDecryptionMaterialsTransition(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public Result<Tuple0, Error> ValidEncryptionMaterialsTransition(
ValidEncryptionMaterialsTransitionInput dafnyInput) {
software.amazon.cryptography.materialproviders.model.ValidEncryptionMaterialsTransitionInput nativeInput = ToNative.ValidEncryptionMaterialsTransitionInput(dafnyInput);
try {
this._impl.ValidEncryptionMaterialsTransition(nativeInput);
return Result.create_Success(Tuple0.create());
} catch (RuntimeException ex) {
return Result.create_Failure(ToDafny.Error(ex));
}
}
public interface Builder {
Builder impl(MaterialProviders impl);
MaterialProviders impl();
TestMaterialProviders build();
}
static class BuilderImpl implements Builder {
protected MaterialProviders impl;
protected BuilderImpl() {
}
public Builder impl(MaterialProviders impl) {
this.impl = impl;
return this;
}
public MaterialProviders impl() {
return this.impl;
}
public TestMaterialProviders build() {
if (Objects.isNull(this.impl())) {
throw new IllegalArgumentException("Missing value for required field `impl`");
}
return new TestMaterialProviders(this);
}
}
}
| 3,204 |
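TestMaterialProviders adapts a native MaterialProviders client to the Dafny-facing IAwsCryptographicMaterialProvidersClient interface: each operation converts the Dafny input to its native model type, delegates, and folds the outcome into Result.create_Success or Result.create_Failure rather than letting exceptions escape. A minimal sketch of constructing the wrapper, assuming the native client has already been built elsewhere; the `wrap` helper below is hypothetical and not part of the generated code:

```java
import software.amazon.cryptography.materialproviders.MaterialProviders;
import software.amazon.cryptography.materialproviders.internaldafny.types.IAwsCryptographicMaterialProvidersClient;
import software.amazon.cryptography.materialproviders.wrapped.TestMaterialProviders;

public class WrappedClientExample {
    // Hypothetical helper: wraps an already-constructed native client for Dafny callers.
    public static IAwsCryptographicMaterialProvidersClient wrap(MaterialProviders nativeClient) {
        return TestMaterialProviders.builder()
                .impl(nativeClient) // required; build() throws IllegalArgumentException if absent
                .build();
    }
}
```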
0 |
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas
|
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas/ptp/TestPtpGuiceModule.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.ptp;
import com.google.inject.AbstractModule;
import com.netflix.staash.mesh.InstanceRegistry;
import com.netflix.staash.mesh.client.ClientFactory;
import com.netflix.staash.mesh.client.memory.MemoryClientFactory;
import com.netflix.staash.mesh.db.TopicRegistry;
import com.netflix.staash.mesh.endpoints.ChordEndpointPolicy;
import com.netflix.staash.mesh.endpoints.EndpointPolicy;
import com.netflix.staash.mesh.seed.TopicSeed;
public class TestPtpGuiceModule extends AbstractModule {
@Override
protected void configure() {
bind(TopicSeed.class).to(DummyTopicSeed.class);
bind(TopicRegistry.class).asEagerSingleton();
bind(InstanceRegistry.class).asEagerSingleton();
bind(ClientFactory.class).to(MemoryClientFactory.class).asEagerSingleton();
bind(EndpointPolicy.class).to(ChordEndpointPolicy.class).asEagerSingleton();
// bind(TopicFactory.class).to(MemoryTopicFactory());
}
}
| 3,205 |
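TestPtpGuiceModule wires the in-memory mesh implementations; note that the TopicFactory binding it would need for the eager TopicRegistry singleton is left commented out. A hedged sketch of how such a module is typically consumed, assuming a companion module supplies that binding, that MemoryTopicFactory implements a com.netflix.staash.mesh.db.TopicFactory interface, and that TopicRegistry exposes a Guice-injectable constructor taking that factory (none of which is shown in this dump):

```java
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.paas.ptp.TestPtpGuiceModule;
import com.netflix.staash.mesh.db.TopicFactory;             // assumed package of the factory interface
import com.netflix.staash.mesh.db.memory.MemoryTopicFactory;
import com.netflix.staash.mesh.endpoints.EndpointPolicy;

public class TestWiringExample {
    // Hypothetical companion module providing the binding TestPtpGuiceModule leaves commented out.
    static class MemoryTopicModule extends AbstractModule {
        @Override
        protected void configure() {
            bind(TopicFactory.class).to(MemoryTopicFactory.class);
        }
    }

    public static void main(String[] args) {
        // Assumes TopicRegistry and InstanceRegistry are constructible as eager singletons.
        Injector injector = Guice.createInjector(new TestPtpGuiceModule(), new MemoryTopicModule());
        EndpointPolicy policy = injector.getInstance(EndpointPolicy.class);
        System.out.println(policy.getClass().getSimpleName()); // ChordEndpointPolicy
    }
}
```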
0 |
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas
|
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas/ptp/TestDriver.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.ptp;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import com.netflix.staash.mesh.InstanceRegistry;
import com.netflix.staash.mesh.client.ClientFactory;
import com.netflix.staash.mesh.client.memory.MemoryClientFactory;
import com.netflix.staash.mesh.db.Entry;
import com.netflix.staash.mesh.db.TopicRegistry;
import com.netflix.staash.mesh.db.memory.MemoryTopicFactory;
import com.netflix.staash.mesh.endpoints.ChordEndpointPolicy;
import com.netflix.staash.mesh.endpoints.EndpointPolicy;
import com.netflix.staash.mesh.server.Server;
public class TestDriver {
public static void main(String[] args) {
final ScheduledExecutorService executor = Executors.newScheduledThreadPool(10);
final TopicRegistry topics = new TopicRegistry(new MemoryTopicFactory());
final InstanceRegistry registry = new InstanceRegistry();
final ClientFactory factory = new MemoryClientFactory();
final EndpointPolicy endpointPolicy = new ChordEndpointPolicy();
topics.createTopic("test");
topics.addEntry("test", new Entry("Key1", "Value1", System.currentTimeMillis()));
topics.addEntry("test", new Entry("Key2", "Value2", System.currentTimeMillis()));
final AtomicInteger counter = new AtomicInteger();
final int instanceCount = 10;
final int asgCount = 10;
final long asgCreateDelay = 5;
// // Thread to add random server
// executor.execute(new Runnable() {
// @Override
// public void run() {
// long id = counter.incrementAndGet();
// if (id < asgCount) {
// for (int i = 0; i < instanceCount; i++) {
// try {
// Server server = new Server(registry, factory, endpointPolicy, "" + (id * instanceCount + i));
// server.start();
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// }
// executor.schedule(this, asgCreateDelay, TimeUnit.SECONDS);
// }
// }
// });
//
// executor.scheduleAtFixedRate(new Runnable() {
// @Override
// public void run() {
// }
// }, 10, 10, TimeUnit.SECONDS);
//
// try {
// Thread.sleep(100000);
// } catch (InterruptedException e) {
// }
}
}
| 3,206 |
0 |
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas
|
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas/ptp/RandomServerProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.ptp;
import java.util.concurrent.atomic.AtomicLong;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.InstanceRegistry;
import com.netflix.staash.mesh.client.ClientFactory;
import com.netflix.staash.mesh.endpoints.EndpointPolicy;
import com.netflix.staash.mesh.server.MemoryServer;
import com.netflix.staash.mesh.server.Server;
import com.netflix.staash.mesh.server.ServerFactory;
public class RandomServerProvider implements ServerFactory {
private final InstanceRegistry instanceRegistry;
private final ClientFactory clientFactory;
private final EndpointPolicy endpointPolicy;
private final AtomicLong counter = new AtomicLong();
@Inject
public RandomServerProvider(InstanceRegistry instanceRegistry, ClientFactory clientFactory, EndpointPolicy endpointPolicy) {
this.instanceRegistry = instanceRegistry;
this.clientFactory = clientFactory;
this.endpointPolicy = endpointPolicy;
}
@Override
public Server createServer(InstanceInfo ii) {
return new MemoryServer(instanceRegistry, clientFactory, endpointPolicy, "" + counter.incrementAndGet());
}
}
| 3,207 |
0 |
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas
|
Create_ds/staash/staash-mesh/src/test/java/com/netflix/paas/ptp/DummyTopicSeed.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.ptp;
import com.netflix.staash.mesh.db.Entry;
import com.netflix.staash.mesh.db.TopicRegistry;
import com.netflix.staash.mesh.db.memory.MemoryTopicFactory;
import com.netflix.staash.mesh.seed.TopicSeed;
public class DummyTopicSeed implements TopicSeed {
private static final String TOPIC_NAME = "test";
public DummyTopicSeed() {
final TopicRegistry topics = new TopicRegistry(new MemoryTopicFactory());
topics.createTopic(TOPIC_NAME);
topics.addEntry(TOPIC_NAME, new Entry("Key1", "Value1", System.currentTimeMillis()));
topics.addEntry(TOPIC_NAME, new Entry("Key2", "Value2", System.currentTimeMillis()));
}
}
| 3,208 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/CompareInstanceInfoByUuid.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh;
import java.util.Comparator;
public class CompareInstanceInfoByUuid implements Comparator<InstanceInfo>{
@Override
public int compare(InstanceInfo arg0, InstanceInfo arg1) {
return arg0.getUuid().compareTo(arg1.getUuid());
}
}
| 3,209 |
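CompareInstanceInfoByUuid establishes the canonical ring order used throughout the mesh: instances are ordered by UUID, not by id. A small sketch with fixed UUIDs (the example class is hypothetical):

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import com.netflix.staash.mesh.CompareInstanceInfoByUuid;
import com.netflix.staash.mesh.InstanceInfo;

public class ComparatorExample {
    public static void main(String[] args) {
        List<InstanceInfo> instances = Arrays.asList(
                new InstanceInfo("b", UUID.fromString("20000000-0000-0000-0000-000000000000")),
                new InstanceInfo("a", UUID.fromString("10000000-0000-0000-0000-000000000000")));

        // Sorting by UUID gives the ring order that InstanceRegistry maintains.
        Collections.sort(instances, new CompareInstanceInfoByUuid());
        System.out.println(instances.get(0).getId()); // "a" (its UUID sorts first)
    }
}
```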
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/InstanceInfo.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh;
import java.util.UUID;
public class InstanceInfo {
private final UUID uuid;
private final String id;
public InstanceInfo(String id, UUID uuid) {
this.uuid = uuid;
this.id = id;
}
public UUID getUuid() {
return uuid;
}
public String getId() {
return id;
}
public String toString() {
return uuid.toString() + "(" + id + ")";
}
}
| 3,210 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/InstanceRegistry.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
* Registry for all active instances. Keeps track of a sorted list of instances.
*
* @author elandau
*
*/
public class InstanceRegistry {
private final Map<UUID, InstanceInfo> members = Maps.newHashMap();
private final AtomicReference<List<InstanceInfo>> ring = new AtomicReference<List<InstanceInfo>>(new ArrayList<InstanceInfo>());
private static final CompareInstanceInfoByUuid comparator = new CompareInstanceInfoByUuid();
/**
* A new instance has joined the ring
* @param node
*/
public synchronized void join(InstanceInfo node) {
members.put(node.getUuid(), node);
update();
}
/**
* An instance was removed from the ring
* @param node
*/
public synchronized void leave(InstanceInfo node) {
members.remove(node.getUuid());
update();
}
/**
* Resort the ring
*/
private void update() {
List<InstanceInfo> list = Lists.newArrayList(members.values());
Collections.sort(list, comparator);
ring.set(list);
}
/**
* Return a sorted list of InstanceInfo. Sorted by UUID.
* @return
*/
public List<InstanceInfo> getMembers() {
return ring.get();
}
}
| 3,211 |
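InstanceRegistry synchronizes writes to the membership map but publishes reads through an AtomicReference holding a freshly sorted snapshot, so getMembers() never observes a partially re-sorted ring. A minimal sketch (the example class is hypothetical):

```java
import java.util.UUID;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.InstanceRegistry;

public class RegistryExample {
    public static void main(String[] args) {
        InstanceRegistry registry = new InstanceRegistry();
        InstanceInfo a = new InstanceInfo("node-a", UUID.randomUUID());
        InstanceInfo b = new InstanceInfo("node-b", UUID.randomUUID());

        registry.join(a);
        registry.join(b);
        System.out.println(registry.getMembers().size()); // 2, sorted by UUID

        // Each join/leave swaps in a new sorted list rather than mutating the published one.
        registry.leave(a);
        System.out.println(registry.getMembers().size()); // 1
    }
}
```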
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/seed/TopicSeed.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.seed;
public interface TopicSeed {
}
| 3,212 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/messages/Message.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.messages;
public class Message {
private final Verb verb;
public Message(Verb verb) {
this.verb = verb;
}
public Verb getVerb() {
return this.verb;
}
}
| 3,213 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/messages/RequestHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.messages;
public interface RequestHandler {
void onMessage(Message message, AsyncResponse response);
}
| 3,214 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/messages/ResponseHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.messages;
public interface ResponseHandler {
}
| 3,215 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/messages/Verb.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.messages;
public enum Verb {
DIGEST_REQUEST,
DIGEST_RESPONSE,
DATA_REQUEST,
DATA_RESPONSE,
DATA_PUSH,
}
| 3,216 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/messages/AsyncResponse.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.messages;
public interface AsyncResponse {
void sendResponse(Message message, Message response);
}
| 3,217 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/endpoints/ChordEndpointPolicy.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.endpoints;
import java.util.Collections;
import java.util.List;
import com.google.common.collect.Lists;
import com.netflix.staash.mesh.CompareInstanceInfoByUuid;
import com.netflix.staash.mesh.InstanceInfo;
/**
* Return a list of endpoints that are of exponential distance from the current position
*
* Example,
*
* pos + 1
* pos + 2
* pos + 4
* pos + 8
* ...
*
* @author elandau
*
*/
public class ChordEndpointPolicy implements EndpointPolicy {
private static final CompareInstanceInfoByUuid comparator = new CompareInstanceInfoByUuid();
private static double LOG_2 = Math.log(2);
@Override
public List<InstanceInfo> getEndpoints(InstanceInfo current, List<InstanceInfo> instances) {
int position = Collections.binarySearch(instances, current, comparator);
int size = instances.size();
int count = (int)Math.ceil(Math.log(size) / LOG_2);
List<InstanceInfo> endpoints = Lists.newArrayListWithCapacity(count);
int offset = 1;
for (int i = 0; i < count; i++) {
endpoints.add(instances.get((position + offset) % size));
offset *= 2;
}
return endpoints;
}
}
| 3,218 |
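ChordEndpointPolicy selects ceil(log2(n)) peers at exponentially growing offsets; with a ring of 10 instances that is 4 peers, at positions +1, +2, +4 and +8 from the caller. A short sketch, assuming the caller sorts the ring with CompareInstanceInfoByUuid first, since the policy binary-searches for the current instance (the example class is hypothetical):

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import com.netflix.staash.mesh.CompareInstanceInfoByUuid;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.endpoints.ChordEndpointPolicy;

public class ChordExample {
    public static void main(String[] args) {
        List<InstanceInfo> ring = new ArrayList<InstanceInfo>();
        for (int i = 0; i < 10; i++) {
            ring.add(new InstanceInfo("node-" + i, UUID.randomUUID()));
        }
        // The policy binary-searches for `current`, so the list must already be ring-sorted.
        Collections.sort(ring, new CompareInstanceInfoByUuid());

        // ceil(log2(10)) = 4 peers, at offsets +1, +2, +4 and +8 around the ring.
        List<InstanceInfo> peers = new ChordEndpointPolicy().getEndpoints(ring.get(0), ring);
        System.out.println(peers.size()); // 4
    }
}
```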
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/endpoints/CircularEndpointPolicy.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.endpoints;
import java.util.Collections;
import java.util.List;
import com.google.common.collect.Lists;
import com.netflix.staash.mesh.CompareInstanceInfoByUuid;
import com.netflix.staash.mesh.InstanceInfo;
public class CircularEndpointPolicy implements EndpointPolicy {
private static final CompareInstanceInfoByUuid comparator = new CompareInstanceInfoByUuid();
private final int replicationFactor;
public CircularEndpointPolicy(int replicationFactor) {
this.replicationFactor = replicationFactor;
}
@Override
public List<InstanceInfo> getEndpoints(InstanceInfo current, List<InstanceInfo> instances) {
int position = Collections.binarySearch(instances, current, comparator);
int size = instances.size();
List<InstanceInfo> endpoints = Lists.newArrayListWithCapacity(replicationFactor);
// Cap the endpoint count at the replication factor, never exceeding the other instances.
int count = Math.min(size - 1, replicationFactor);
for (int i = 0; i < count; i++) {
endpoints.add(instances.get((position + i) % size));
}
return endpoints;
}
}
| 3,219 |
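CircularEndpointPolicy instead takes a fixed number of consecutive ring positions starting at the owner; with the replication-factor cap above (the minimum of replicationFactor and size - 1), a factor of 3 yields the owner plus its next two successors. A brief sketch (the example class is hypothetical):

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import com.netflix.staash.mesh.CompareInstanceInfoByUuid;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.endpoints.CircularEndpointPolicy;

public class CircularExample {
    public static void main(String[] args) {
        List<InstanceInfo> ring = new ArrayList<InstanceInfo>();
        for (int i = 0; i < 10; i++) {
            ring.add(new InstanceInfo("node-" + i, UUID.randomUUID()));
        }
        Collections.sort(ring, new CompareInstanceInfoByUuid());

        // Replication factor 3: the owner plus its next two ring successors.
        List<InstanceInfo> replicas = new CircularEndpointPolicy(3).getEndpoints(ring.get(4), ring);
        System.out.println(replicas.size()); // 3
    }
}
```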
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/endpoints/EndpointPolicy.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.endpoints;
import java.util.List;
import com.netflix.staash.mesh.InstanceInfo;
public interface EndpointPolicy {
List<InstanceInfo> getEndpoints(InstanceInfo current, List<InstanceInfo> instances);
}
| 3,220 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/jersey/MeshServerResource.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.jersey;
import java.util.List;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@Path("/1/mesh")
public class MeshServerResource {
/**
* Return the list of topic names
* @return the list of topic names
*/
@GET
@Path("topic")
public List<String> getTopics() {
return null;
}
@GET
@Path("topic/{name}")
public void getTopicData(@PathParam("name") String topicName) {
}
/**
* Create the topic with the given name
* @param topicName name of the topic to create
*/
@POST
@Path("topic/{name}")
public void postTopic(@PathParam("name") String topicName) {
}
@POST
@Path("topic/{name}/{key}")
public void postTopicKey(@PathParam("name") String topicName, @PathParam("key") String key) {
}
@DELETE
@Path("topic/{name}")
public void deleteTopic(@PathParam("name") String topicName) {
}
@DELETE
@Path("topic/{name}/{key}")
public void deleteTopicKey(@PathParam("name") String topicName, @PathParam("key") String key) {
}
}
| 3,221 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/jersey/JsonMessageBodyReader.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.jersey;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.ws.rs.Consumes;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
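/**
* A MessageBodyReader implementation that uses Jackson ObjectMapper to deserialize JSON request bodies.
*/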
@Consumes({"application/json"})
@Provider
public class JsonMessageBodyReader implements MessageBodyReader<Object> {
private final ObjectMapper mapper;
public JsonMessageBodyReader() {
mapper = new ObjectMapper();
mapper.getSerializationConfig().setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
}
@Override
public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
// Check the deserialization side; canSerialize() only covers the write path
return mapper.canDeserialize(mapper.getTypeFactory().constructType(genericType));
}
@Override
public Object readFrom(Class<Object> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, String> arg4, InputStream is) throws IOException, WebApplicationException {
return mapper.readValue(is, type);
}
}
| 3,222 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/jersey/JsonMessageBodyWriter.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.jersey;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
/**
* A MessageBodyWriter implementation that uses Jackson ObjectMapper to serialize objects to JSON.
*/
@Produces({"application/json"})
@Provider
public class JsonMessageBodyWriter implements MessageBodyWriter<Object> {
private final ObjectMapper mapper;
public JsonMessageBodyWriter() {
mapper = new ObjectMapper();
mapper.getSerializationConfig().setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
}
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations,
MediaType mediaType) {
return mapper.canSerialize(type);
}
public long getSize(Object data, Class<?> type, Type genericType, Annotation[] annotations,
MediaType mediaType) {
return -1;
}
public void writeTo(Object data, Class<?> type, Type genericType, Annotation[] annotations,
MediaType mediaType, MultivaluedMap<String, Object> headers, OutputStream out)
throws IOException, WebApplicationException {
mapper.writeValue(out, data);
out.flush();
}
}
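A round-trip sketch, not part of the original source, invoking the two JSON providers directly instead of through Jersey; the Pojo type is made up for illustration, the sketch is assumed to live in the same package as the providers, and the null annotation and header arguments are acceptable only because the implementations above ignore them.
// Illustrative example only.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import javax.ws.rs.core.MediaType;
class JsonProviderExample {
// Hypothetical payload type used only for this sketch
public static class Pojo {
private String name;
public String getName() { return name; }
public void setName(String name) { this.name = name; }
}
public static void main(String[] args) throws Exception {
Pojo in = new Pojo();
in.setName("staash");
// Serialize with the writer; it ignores the headers map, so null is passed here
JsonMessageBodyWriter writer = new JsonMessageBodyWriter();
ByteArrayOutputStream out = new ByteArrayOutputStream();
writer.writeTo(in, Pojo.class, Pojo.class, null, MediaType.APPLICATION_JSON_TYPE, null, out);
// Deserialize with the reader
JsonMessageBodyReader reader = new JsonMessageBodyReader();
@SuppressWarnings({"unchecked", "rawtypes"})
Pojo back = (Pojo) reader.readFrom((Class) Pojo.class, Pojo.class, null,
MediaType.APPLICATION_JSON_TYPE, null, new ByteArrayInputStream(out.toByteArray()));
System.out.println(back.getName());
}
}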
| 3,223 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/MemoryServer.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.UUID;
import java.util.Map.Entry;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicLong;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.netflix.staash.mesh.CompareInstanceInfoByUuid;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.InstanceRegistry;
import com.netflix.staash.mesh.client.Client;
import com.netflix.staash.mesh.client.ClientFactory;
import com.netflix.staash.mesh.db.TopicRegistry;
import com.netflix.staash.mesh.db.memory.MemoryTopicFactory;
import com.netflix.staash.mesh.endpoints.EndpointPolicy;
import com.netflix.staash.mesh.messages.AsyncResponse;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.RequestHandler;
import com.netflix.staash.mesh.messages.Verb;
import com.netflix.staash.mesh.server.handlers.DataPushHandler;
import com.netflix.staash.mesh.server.handlers.DataRequestHandler;
import com.netflix.staash.mesh.server.handlers.DataResponseHandler;
import com.netflix.staash.mesh.server.handlers.DigestRequestHandler;
import com.netflix.staash.mesh.server.handlers.DigestResponseHandler;
public class MemoryServer implements Server, RequestHandler {
private static final CompareInstanceInfoByUuid comparator = new CompareInstanceInfoByUuid();
private static final AtomicLong changeCounter = new AtomicLong();
private final InstanceRegistry instanceRegistry;
private final ClientFactory clientFactory;
private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(2);
private final InstanceInfo instanceInfo;
private final EndpointPolicy endpointPolicy;
private final SortedMap<InstanceInfo, Client> peers = Maps.newTreeMap(comparator);
private long generationCounter = 0;
private Map<Verb, RequestHandler> verbHandlers = Maps.newEnumMap(Verb.class);
private TopicRegistry topics = new TopicRegistry(new MemoryTopicFactory());
@Inject
public MemoryServer(InstanceRegistry instanceRegistry, ClientFactory clientFactory, EndpointPolicy endpointPolicy, String id) {
this.instanceRegistry = instanceRegistry;
this.clientFactory = clientFactory;
this.instanceInfo = new InstanceInfo(id, UUID.randomUUID());
this.endpointPolicy = endpointPolicy;
verbHandlers.put(Verb.DATA_PUSH, new DataPushHandler());
verbHandlers.put(Verb.DATA_REQUEST, new DataRequestHandler());
verbHandlers.put(Verb.DATA_RESPONSE, new DataResponseHandler());
verbHandlers.put(Verb.DIGEST_REQUEST, new DigestRequestHandler());
verbHandlers.put(Verb.DIGEST_RESPONSE, new DigestResponseHandler());
}
public void start() {
System.out.println("Starting " + instanceInfo);
this.instanceRegistry.join(instanceInfo);
// executor.scheduleAtFixedRate(
// new RefreshRingRunnable(this, instanceRegistry),
// 10, 10, TimeUnit.SECONDS);
}
public void stop() {
System.out.println("Stopping " + instanceInfo);
this.instanceRegistry.leave(instanceInfo);
}
/**
* Update the list of all members in the ring
* @param ring
*/
public void setMembers(List<InstanceInfo> ring) {
List<InstanceInfo> instances = endpointPolicy.getEndpoints(instanceInfo, ring);
Collections.sort(instances, comparator);
List<InstanceInfo> toRemove = Lists.newArrayList();
List<InstanceInfo> toAdd = Lists.newArrayList();
List<InstanceInfo> toDisconnect = Lists.newArrayList();
int changeCount = 0;
for (Entry<InstanceInfo, Client> peer : peers.entrySet()) {
// Determine if peers have been removed from the ring
if (Collections.binarySearch(ring, peer.getKey(), comparator) < 0) {
toRemove.add(peer.getKey());
changeCount++;
}
// Determine if peers are no longer appropriate
else if (Collections.binarySearch(instances, peer.getKey(), comparator) < 0) {
toDisconnect.add(peer.getKey());
changeCount++;
}
}
// Add new peers
for (InstanceInfo peer : instances) {
if (!peers.containsKey(peer)) {
toAdd.add(peer);
changeCount++;
}
}
for (InstanceInfo ii : toRemove) {
removePeer(ii);
}
for (InstanceInfo ii : toDisconnect) {
disconnectPeer(ii);
}
for (InstanceInfo ii : toAdd) {
addPeer(ii);
}
generationCounter++;
if (generationCounter > 1 && changeCount != 0)
printPeers(changeCount);
}
/**
* Remove a peer that is no longer in the ring.
* @param instanceInfo
*/
private void removePeer(InstanceInfo instanceInfo) {
System.out.println("Removing peer " + this.instanceInfo + " -> " + instanceInfo);
Client client = peers.remove(instanceInfo);
client.shutdown();
}
/**
* Add a new peer connection
* @param instanceInfo
*/
private void addPeer(InstanceInfo instanceInfo) {
// System.out.println("Adding peer " + this.instanceInfo + " -> " + instanceInfo);
Client client = clientFactory.createClient(instanceInfo);
peers.put(instanceInfo, client);
bootstrapClient(client);
}
/**
* Disconnect a peer that is no longer in our path
* @param instanceInfo
*/
private void disconnectPeer(InstanceInfo instanceInfo) {
System.out.println("Disconnect peer " + this.instanceInfo + " -> " + instanceInfo);
Client client = peers.remove(instanceInfo);
if (client != null) {
client.shutdown();
}
else {
System.out.println(instanceInfo + " > " + peers);
}
}
/**
* List all peers to which this server is connected
* @return
*/
public Iterable<InstanceInfo> listPeers() {
return peers.keySet();
}
private void bootstrapClient(Client client) {
// Placeholder: bootstrapping a newly added peer is not yet implemented
}
private void printPeers(int changeCount) {
changeCounter.addAndGet(changeCount);
StringBuilder sb = new StringBuilder();
sb.append(">>> " + instanceInfo + " (" + changeCount + " of " + peers.size() + " / " + changeCounter.get() + ") gen=" + generationCounter + "\n");
// for (Entry<InstanceInfo, Client> peer : peers.entrySet()) {
// sb.append(" " + peer.getKey()).append("\n");
// }
//
System.out.print(sb.toString());
}
@Override
public void onMessage(Message message, AsyncResponse response) {
try {
RequestHandler handler = verbHandlers.get(message.getVerb());
if (handler != null) {
handler.onMessage(message, response);
}
}
catch (Exception e) {
// Notify error
}
}
}
| 3,224 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/ServerFactory.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server;
import com.netflix.staash.mesh.InstanceInfo;
public interface ServerFactory {
Server createServer(InstanceInfo ii);
}
| 3,225 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/Server.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server;
/**
* P2P server
* @author elandau
*
*/
public interface Server {
}
| 3,226 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/Session.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server;
public interface Session {
}
| 3,227 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/tasks/RefreshRingRunnable.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server.tasks;
import com.netflix.staash.mesh.InstanceRegistry;
import com.netflix.staash.mesh.server.Server;
public class RefreshRingRunnable implements Runnable {
private final Server server;
private final InstanceRegistry instanceRegistry;
public RefreshRingRunnable(Server server, InstanceRegistry instanceRegistry) {
this.server = server;
this.instanceRegistry = instanceRegistry;
}
@Override
public void run() {
try {
// server.(this.instanceRegistry.getMembers());
}
catch (Exception e) {
e.printStackTrace();
}
}
}
| 3,228 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/handlers/DataResponseHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server.handlers;
import com.netflix.staash.mesh.messages.AsyncResponse;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.RequestHandler;
public class DataResponseHandler implements RequestHandler {
@Override
public void onMessage(Message message, AsyncResponse response) {
}
}
| 3,229 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/handlers/DigestResponseHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server.handlers;
import com.netflix.staash.mesh.messages.AsyncResponse;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.RequestHandler;
public class DigestResponseHandler implements RequestHandler {
@Override
public void onMessage(Message message, AsyncResponse response) {
}
}
| 3,230 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/handlers/DataRequestHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server.handlers;
import com.netflix.staash.mesh.messages.AsyncResponse;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.RequestHandler;
public class DataRequestHandler implements RequestHandler {
@Override
public void onMessage(Message message, AsyncResponse response) {
}
}
| 3,231 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/handlers/DataPushHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server.handlers;
import com.netflix.staash.mesh.messages.AsyncResponse;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.RequestHandler;
public class DataPushHandler implements RequestHandler {
@Override
public void onMessage(Message message, AsyncResponse response) {
}
}
| 3,232 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/server/handlers/DigestRequestHandler.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.server.handlers;
import com.netflix.staash.mesh.messages.AsyncResponse;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.RequestHandler;
public class DigestRequestHandler implements RequestHandler {
@Override
public void onMessage(Message message, AsyncResponse response) {
}
}
| 3,233 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/TopicRegistry.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db;
import java.util.concurrent.ConcurrentMap;
import com.google.common.collect.Maps;
public class TopicRegistry {
private ConcurrentMap<String, Topic> topics = Maps.newConcurrentMap();
private TopicFactory topicFactory;
public TopicRegistry(TopicFactory topicFactory) {
this.topicFactory = topicFactory;
}
public boolean createTopic(String topicName) {
Topic topic = topicFactory.create(topicName);
if (null == topics.putIfAbsent(topicName, topic)) {
return true;
}
return false;
}
public boolean removeTopic(String topicName, long timestamp) {
Topic topic = topics.get(topicName);
if (topic != null) {
return topic.deleteTopic(timestamp);
}
return false;
}
public boolean addEntry(String topicName, Entry tuple) {
Topic topic = topics.get(topicName);
if (topic != null) {
return topic.upsert(tuple);
}
return false;
}
public Iterable<String> listTopics() {
return topics.keySet();
}
}
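A short usage sketch, not in the original source, wiring the registry to the in-memory topic factory defined elsewhere in this module; the topic and key names are arbitrary.
// Illustrative example only.
import com.netflix.staash.mesh.db.Entry;
import com.netflix.staash.mesh.db.TopicRegistry;
import com.netflix.staash.mesh.db.memory.MemoryTopicFactory;
class TopicRegistryExample {
public static void main(String[] args) {
TopicRegistry registry = new TopicRegistry(new MemoryTopicFactory());
registry.createTopic("events"); // returns true only the first time
registry.addEntry("events", new Entry("k1", "v1")); // timestamped with the current time
for (String name : registry.listTopics()) {
System.out.println(name);
}
}
}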
| 3,234 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/TopicFactory.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db;
public interface TopicFactory {
Topic create(String name);
}
| 3,235 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/Topic.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db;
public interface Topic {
/**
* Insert an entry into the topic
* @param tuple
* @return
*/
public boolean upsert(Entry tuple);
/**
* Read an entry from the topic
* @param key
* @return
*/
public Entry read(String key);
/**
* Delete an entry from the topic
* @param tuple
* @return
*/
public boolean delete(Entry tuple);
/**
* Get the topic name
* @return
*/
public String getName();
/**
* Get the deleted time of the topic.
* @return Time the topic was deleted, or 0 if it has not been deleted
*/
public long getDeletedTime();
/**
* Get the time when the topic was created
* @return
*/
public long getCreatedTime();
/**
* Get the number of entries in the topic
* @return
*/
public long getEntryCount();
/**
* Mark the topic as having been deleted. The delete only applies
* if the topic's last modified timestamp is less than the deleted time.
* @param timestamp time of the delete
* @return true if the delete was applied
*/
public boolean deleteTopic(long timestamp);
}
| 3,236 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/Entry.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db;
public class Entry {
private String key;
private String value;
private long timestamp;
public Entry(String key, String value, long timestamp) {
this.key = key;
this.value = value;
this.timestamp = timestamp;
}
public Entry(String key, String value) {
this.key = key;
this.value = value;
this.timestamp = System.currentTimeMillis();
}
public String getKey() {
return key;
}
public String getValue() {
return value;
}
public long getTimestamp() {
return timestamp;
}
public boolean isDeleted() {
return this.value == null;
}
}
| 3,237 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/memory/MemoryTopic.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db.memory;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import com.google.common.collect.Maps;
import com.netflix.staash.mesh.db.Entry;
import com.netflix.staash.mesh.db.Topic;
public class MemoryTopic implements Topic {
private final String name;
private final ConcurrentMap<String, EntryHolder> rows;
private final long createdTime;
private volatile long deletedTimestamp;
private AtomicLong size = new AtomicLong(0);
public MemoryTopic(String name) {
this.name = name;
this.createdTime = System.currentTimeMillis();
rows = Maps.newConcurrentMap();
}
@Override
public boolean deleteTopic(long timestamp) {
if (timestamp < deletedTimestamp) {
return false;
}
deletedTimestamp = timestamp;
// Tombstone each entry covered by the delete and update the live-entry count
for (java.util.Map.Entry<String, EntryHolder> entry : rows.entrySet()) {
if (entry.getValue().delete(deletedTimestamp)) {
size.decrementAndGet();
}
}
return true;
}
@Override
public boolean upsert(Entry entry) {
EntryHolder existing = rows.putIfAbsent(entry.getKey(), new EntryHolder(entry));
if (existing != null) {
return existing.set(entry);
}
size.incrementAndGet();
return true;
}
@Override
public Entry read(String key) {
EntryHolder holder = rows.get(key);
if (holder == null) {
return null;
}
return holder.getEntry();
}
@Override
public boolean delete(Entry entry) {
EntryHolder holder = rows.get(entry.getKey());
if (holder != null) {
if (holder.delete(entry.getTimestamp())) {
size.decrementAndGet();
return true;
}
}
return false;
}
@Override
public String getName() {
return name;
}
@Override
public long getCreatedTime() {
return createdTime;
}
@Override
public long getEntryCount() {
return size.get();
}
@Override
public long getDeletedTime() {
return deletedTimestamp;
}
}
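A small sketch, not part of the original source, illustrating the last-write-wins behaviour of the timestamp checks above; the key and timestamps are arbitrary.
// Illustrative example only.
import com.netflix.staash.mesh.db.Entry;
class MemoryTopicExample {
public static void main(String[] args) {
MemoryTopic topic = new MemoryTopic("events");
topic.upsert(new Entry("k1", "v1", 100)); // first write for the key -> true
topic.upsert(new Entry("k1", "v0", 50)); // older timestamp is ignored -> false
topic.delete(new Entry("k1", null, 200)); // newer tombstone applies -> true
System.out.println(topic.read("k1").isDeleted()); // true
System.out.println(topic.getEntryCount()); // 0 live entries
}
}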
| 3,238 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/memory/EntryHolder.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db.memory;
import com.netflix.staash.mesh.db.Entry;
public class EntryHolder {
private Entry tuple;
public EntryHolder(Entry tuple) {
this.tuple = tuple;
}
public synchronized boolean delete(long timestamp) {
if (timestamp > tuple.getTimestamp()) {
this.tuple = new Entry(tuple.getKey(), null, timestamp);
return true;
}
return false;
}
public synchronized boolean set(Entry tuple) {
if (tuple.getTimestamp() > this.tuple.getTimestamp()) {
this.tuple = tuple;
return true;
}
return false;
}
public synchronized boolean isDeleted() {
return this.tuple.getValue() == null;
}
public synchronized Entry getEntry() {
return tuple;
}
}
| 3,239 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/db/memory/MemoryTopicFactory.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.db.memory;
import com.netflix.staash.mesh.db.Topic;
import com.netflix.staash.mesh.db.TopicFactory;
public class MemoryTopicFactory implements TopicFactory {
@Override
public Topic create(String name) {
return new MemoryTopic(name);
}
}
| 3,240 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/client/Client.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.client;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.ResponseHandler;
/**
* Client of a PTP server
*
* @author elandau
*
*/
public interface Client {
InstanceInfo getInstanceInfo();
void sendMessage(Message request, ResponseHandler response);
void shutdown();
void connect();
}
| 3,241 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/client/ClientFactory.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.client;
import com.netflix.staash.mesh.InstanceInfo;
public interface ClientFactory {
Client createClient(InstanceInfo instanceInfo);
}
| 3,242 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/client
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/client/memory/MemoryClientFactory.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.client.memory;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.client.Client;
import com.netflix.staash.mesh.client.ClientFactory;
public class MemoryClientFactory implements ClientFactory {
@Override
public Client createClient(InstanceInfo instanceInfo) {
return new MemoryClient(instanceInfo);
}
}
| 3,243 |
0 |
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/client
|
Create_ds/staash/staash-mesh/src/main/java/com/netflix/staash/mesh/client/memory/MemoryClient.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.staash.mesh.client.memory;
import com.netflix.staash.mesh.InstanceInfo;
import com.netflix.staash.mesh.client.Client;
import com.netflix.staash.mesh.messages.Message;
import com.netflix.staash.mesh.messages.ResponseHandler;
public class MemoryClient implements Client {
private final InstanceInfo instanceInfo;
public MemoryClient(InstanceInfo instanceInfo) {
this.instanceInfo = instanceInfo;
}
@Override
public InstanceInfo getInstanceInfo() {
return instanceInfo;
}
@Override
public void sendMessage(Message request, ResponseHandler response) {
// TODO Auto-generated method stub
}
@Override
public void shutdown() {
// TODO Auto-generated method stub
}
@Override
public void connect() {
// TODO Auto-generated method stub
}
}
| 3,244 |
0 |
Create_ds/staash/staash-tomcat/src/main/java/com/netflix/staash
|
Create_ds/staash/staash-tomcat/src/main/java/com/netflix/staash/embedded/Tomcat7Server.java
|
package com.netflix.staash.embedded;
import org.apache.catalina.core.AprLifecycleListener;
import org.apache.catalina.core.StandardServer;
public class Tomcat7Server {
// public static void main(String[] args) throws Exception {
// String appBase = "src/main";
// Integer port = 8080;
//
// Tomcat tomcat = new Tomcat();
// tomcat.setPort(port);
//
// tomcat.setBaseDir(".");
// tomcat.getHost().setAppBase(appBase);
//
// String contextPath = "";
//
// // Add AprLifecycleListener
// //StandardServer server = (StandardServer)tomcat.getServer();
// //AprLifecycleListener listener = new AprLifecycleListener();
// // server.addLifecycleListener(listener);
// //server.addLifecycleListener(listener);
//
// tomcat.addWebapp(contextPath, appBase);
// tomcat.start();
// tomcat.getServer().await();
// }
}
| 3,245 |
0 |
Create_ds/staash/staash-tomcat/src/main/java/com/netflix/staash
|
Create_ds/staash/staash-tomcat/src/main/java/com/netflix/staash/embedded/TomcatServer.java
|
package com.netflix.staash.embedded;
import org.apache.catalina.Engine;
import org.apache.catalina.Host;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.connector.Connector;
import org.apache.catalina.core.StandardContext;
import org.apache.catalina.startup.Embedded;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TomcatServer {
private Embedded tomcat;
private int port;
private boolean isRunning;
private static final Logger LOG = LoggerFactory.getLogger(TomcatServer.class);
private static final boolean isInfo = LOG.isInfoEnabled();
public TomcatServer(String contextPath, int port, String appBase, boolean shutdownHook) {
if(contextPath == null || appBase == null || appBase.length() == 0) {
throw new IllegalArgumentException("Context path and appBase must not be null or empty");
}
if(!contextPath.startsWith("/")) {
contextPath = "/" + contextPath;
}
this.port = port;
tomcat = new Embedded();
tomcat.setName("TomcatEmbeddedtomcat");
Host localHost = tomcat.createHost("localhost", appBase);
localHost.setAutoDeploy(false);
StandardContext rootContext = (StandardContext) tomcat.createContext(contextPath, "webapp");
rootContext.setDefaultWebXml("web.xml");
localHost.addChild(rootContext);
Engine engine = tomcat.createEngine();
engine.setDefaultHost(localHost.getName());
engine.setName("TomcatEngine");
engine.addChild(localHost);
tomcat.addEngine(engine);
Connector connector = tomcat.createConnector(localHost.getName(), port, false);
tomcat.addConnector(connector);
// register shutdown hook
if(shutdownHook) {
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
if(isRunning) {
if(isInfo) LOG.info("Stopping the Tomcat tomcat, through shutdown hook");
try {
if (tomcat != null) {
tomcat.stop();
}
} catch (LifecycleException e) {
LOG.error("Error while stopping the Tomcat tomcat, through shutdown hook", e);
}
}
}
});
}
}
/**
* Start the embedded Tomcat server
*/
public void start() throws LifecycleException {
if(isRunning) {
LOG.warn("Embedded Tomcat server is already running @ port={}; ignoring the start", port);
return;
}
if(isInfo) LOG.info("Starting the embedded Tomcat server @ port={}", port);
tomcat.setAwait(true);
tomcat.start();
isRunning = true;
}
/**
* Stop the embedded Tomcat server
*/
public void stop() throws LifecycleException {
if(!isRunning) {
LOG.warn("Embedded Tomcat server is not running @ port={}", port);
return;
}
if(isInfo) LOG.info("Stopping the embedded Tomcat server");
tomcat.stop();
isRunning = false;
}
public boolean isRunning() {
return isRunning;
}
public static void main(String[] args) throws Exception{
TomcatServer tomcat = new TomcatServer("staash.war", 8080, "src/main", true);
tomcat.start();
Thread.sleep(1000000);
}
}
| 3,246 |
0 |
Create_ds/staash/staash-eureka/src/test/java/com/netflix
|
Create_ds/staash/staash-eureka/src/test/java/com/netflix/paas/EurekaNodeDiscoveryTest.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas;
import java.util.List;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import com.google.inject.AbstractModule;
import com.google.inject.Injector;
import com.google.inject.name.Names;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.paas.cassandra.discovery.EurekaAstyanaxHostSupplier;
import com.netflix.paas.cassandra.discovery.EurekaModule;
public class EurekaNodeDiscoveryTest {
Logger LOG = LoggerFactory.getLogger(EurekaNodeDiscoveryTest.class);
@Test
@Ignore
public void testSupplier() {
List<AbstractModule> modules = Lists.newArrayList(
new AbstractModule() {
@Override
protected void configure() {
bind(String.class).annotatedWith(Names.named("groupName")).toInstance("testgroup");
}
},
new EurekaModule()
);
// Create the injector
Injector injector = LifecycleInjector.builder()
.withModules(modules)
.createInjector();
EurekaAstyanaxHostSupplier supplier = injector.getInstance(EurekaAstyanaxHostSupplier.class);
Supplier<List<Host>> list1 = supplier.getSupplier("cass_sandbox");
List<Host> hosts = list1.get();
LOG.info("cass_sandbox");
for (Host host:hosts) {
LOG.info(host.getHostName());
}
Supplier<List<Host>> list2 = supplier.getSupplier("ABCLOUD");
hosts = list2.get();
LOG.info("ABCLOUD");
for (Host host:hosts) {
LOG.info(host.getHostName());
}
Supplier<List<Host>> list3 = supplier.getSupplier("CASSS_PAAS");
hosts = list3.get();
LOG.info("casss_paas");
for (Host host:hosts) {
LOG.info(host.getHostName());
}
}
}
| 3,247 |
0 |
Create_ds/staash/staash-eureka/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-eureka/src/main/java/com/netflix/paas/cassandra/discovery/EurekaClusterDiscoveryService.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.discovery;
import java.util.Collection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.apache.commons.configuration.AbstractConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.inject.Inject;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.discovery.DiscoveryManager;
import com.netflix.discovery.shared.Application;
/**
* Implementation of a cluster discovery service using Eureka
*
* @author elandau
*
*/
public class EurekaClusterDiscoveryService implements ClusterDiscoveryService {
private static final Logger LOG = LoggerFactory.getLogger(EurekaClusterDiscoveryService.class);
private static final String PROPERTY_MATCH = "com.netflix.paas.discovery.eureka.match";
private DiscoveryClient client;
private AbstractConfiguration config;
@Inject
public EurekaClusterDiscoveryService(AbstractConfiguration config) {
this.config = config;
initialize();
}
@PostConstruct
public void initialize() {
LOG.info("Initializing Eureka client");
client = DiscoveryManager.getInstance().getDiscoveryClient();
}
@PreDestroy
public void shutdown() {
// TODO: Move this somewhere else
LOG.info("Shutting down Eureka client");
DiscoveryManager.getInstance().shutdownComponent();
client = null;
}
@Override
public Collection<String> getClusterNames() {
final Pattern regex = Pattern.compile(this.config.getString(PROPERTY_MATCH));
return Collections2.filter(
Collections2.transform(client.getApplications().getRegisteredApplications(),
new Function<Application, String>() {
@Override
public String apply(Application app) {
return app.getName();
}
}),
new Predicate<String>() {
@Override
public boolean apply(String clusterName) {
Matcher m = regex.matcher(clusterName);
return m.matches();
}
});
}
@Override
public String getName() {
return "eureka";
}
}
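A usage sketch, not from the original source; it assumes Eureka has already been initialized (for example by EurekaModule below) and that commons-configuration's BaseConfiguration is available, and the regex value shown is only an example.
// Illustrative example only.
import org.apache.commons.configuration.BaseConfiguration;
class EurekaClusterDiscoveryExample {
public static void main(String[] args) {
BaseConfiguration config = new BaseConfiguration();
// Only application names matching this pattern are reported as clusters
config.setProperty("com.netflix.paas.discovery.eureka.match", "CASS_.*");
EurekaClusterDiscoveryService discovery = new EurekaClusterDiscoveryService(config);
for (String cluster : discovery.getClusterNames()) {
System.out.println(cluster);
}
}
}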
| 3,248 |
0 |
Create_ds/staash/staash-eureka/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-eureka/src/main/java/com/netflix/paas/cassandra/discovery/EurekaModule.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.discovery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.AbstractModule;
import com.google.inject.multibindings.MapBinder;
import com.netflix.appinfo.CloudInstanceConfig;
import com.netflix.discovery.DefaultEurekaClientConfig;
import com.netflix.discovery.DiscoveryManager;
import com.netflix.paas.cassandra.provider.HostSupplierProvider;
import com.netflix.paas.cassandra.tasks.ClusterDiscoveryTask;
import com.netflix.paas.cassandra.tasks.ClusterRefreshTask;
import com.netflix.paas.tasks.InlineTaskManager;
import com.netflix.paas.tasks.TaskManager;
public class EurekaModule extends AbstractModule {
private static final Logger LOG = LoggerFactory.getLogger(EurekaModule.class);
@Override
protected void configure() {
LOG.info("Configuring EurekaModule");
// Initialize eureka
// TODO: Move this to a bootstrap thingy
DiscoveryManager.getInstance().initComponent(
new CloudInstanceConfig(),
new DefaultEurekaClientConfig());
// Eureka - Astyanax integration
MapBinder<String, HostSupplierProvider> hostSuppliers = MapBinder.newMapBinder(binder(), String.class, HostSupplierProvider.class);
hostSuppliers.addBinding("eureka").to(EurekaAstyanaxHostSupplier.class);
//bind(ClusterDiscoveryService.class).to(EurekaClusterDiscoveryService.class).asEagerSingleton();
}
}
| 3,249 |
0 |
Create_ds/staash/staash-eureka/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-eureka/src/main/java/com/netflix/paas/cassandra/discovery/EurekaAstyanaxHostSupplier.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.discovery;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.netflix.appinfo.AmazonInfo;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.appinfo.AmazonInfo.MetaDataKey;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.discovery.DiscoveryManager;
import com.netflix.discovery.shared.Application;
import com.netflix.paas.cassandra.provider.HostSupplierProvider;
public class EurekaAstyanaxHostSupplier implements HostSupplierProvider {
private static final Logger LOG = LoggerFactory.getLogger(EurekaAstyanaxHostSupplier.class);
private final DiscoveryClient eurekaClient;
public EurekaAstyanaxHostSupplier() {
this.eurekaClient = DiscoveryManager.getInstance().getDiscoveryClient();
Preconditions.checkNotNull(this.eurekaClient);
}
@Override
public Supplier<List<Host>> getSupplier(final String clusterName) {
return new Supplier<List<Host>>() {
@Override
public List<Host> get() {
Application app = eurekaClient.getApplication(clusterName.toUpperCase());
List<Host> hosts = Lists.newArrayList();
if (app == null) {
LOG.warn("Cluster '{}' not found in eureka", new Object[]{clusterName});
}
else {
List<InstanceInfo> ins = app.getInstances();
if (ins != null && !ins.isEmpty()) {
hosts = Lists.newArrayList(Collections2.transform(
Collections2.filter(ins, new Predicate<InstanceInfo>() {
@Override
public boolean apply(InstanceInfo input) {
return input.getStatus() == InstanceInfo.InstanceStatus.UP;
}
}), new Function<InstanceInfo, Host>() {
@Override
public Host apply(InstanceInfo info) {
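// Derive an alternate IP from an EC2-style public hostname
// (e.g. ec2-54-12-34-56.compute-1.amazonaws.com -> 54.12.34.56);
// the parts indices below assume that naming convention.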
String[] parts = StringUtils.split(
StringUtils.split(info.getHostName(), ".")[0], '-');
Host host = new Host(info.getHostName(), info.getPort())
.addAlternateIpAddress(
StringUtils.join(new String[] { parts[1], parts[2], parts[3],
parts[4] }, "."))
.addAlternateIpAddress(info.getIPAddr())
.setId(info.getId());
try {
if (info.getDataCenterInfo() instanceof AmazonInfo) {
AmazonInfo amazonInfo = (AmazonInfo)info.getDataCenterInfo();
host.setRack(amazonInfo.get(MetaDataKey.availabilityZone));
}
}
catch (Throwable t) {
LOG.error("Error getting rack for host " + host.getName(), t);
}
return host;
}
}));
}
else {
LOG.warn("Cluster '{}' found in eureka but has no instances", new Object[]{clusterName});
}
}
return hosts;
}
};
}
}
| 3,250 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test/core/StaashDeamon.java
|
package com.netflix.staash.test.core;
import org.apache.cassandra.service.CassandraDaemon;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class StaashDeamon extends CassandraDaemon {
private static final Logger logger = LoggerFactory.getLogger(StaashDeamon.class);
private static final StaashDeamon instance = new StaashDeamon();
public static void main(String[] args) {
System.setProperty("cassandra-foreground", "true");
System.setProperty("log4j.defaultInitOverride", "true");
System.setProperty("log4j.configuration", "log4j.properties");
instance.activate();
}
@Override
protected void setup() {
super.setup();
}
@Override
public void init(String[] arguments) throws IOException {
super.init(arguments);
}
@Override
public void start() {
super.start();
}
@Override
public void stop() {
super.stop();
}
}
| 3,251 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test/core/RequiresKeyspace.java
|
package com.netflix.staash.test.core;
import org.apache.cassandra.locator.AbstractReplicationStrategy;
import org.apache.cassandra.locator.SimpleStrategy;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface RequiresKeyspace {
String ksName();
int replication() default 1;
Class<? extends AbstractReplicationStrategy> strategy() default SimpleStrategy.class;
String strategyOptions() default "";
}
| 3,252 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test/core/CassandraRunner.java
|
package com.netflix.staash.test.core;
import java.io.File;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.KSMetaData;
import org.apache.cassandra.db.ColumnFamilyType;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.db.marshal.TypeParser;
import org.apache.cassandra.exceptions.AlreadyExistsException;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.StorageService;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CassandraRunner extends BlockJUnit4ClassRunner {
private static final Logger logger = LoggerFactory.getLogger(CassandraRunner.class);
static StaashDeamon staashDaemon;
static ExecutorService executor = Executors.newSingleThreadExecutor();
public CassandraRunner(Class<?> klass) throws InitializationError {
super(klass);
logger.debug("CassandraRunner constructed with class {}", klass.getName());
}
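    // Before each test method, honour method-level @RequiresKeyspace / @RequiresColumnFamily
    // annotations so the schema the test needs exists (or is truncated) prior to running it.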
@Override
protected void runChild(FrameworkMethod method, RunNotifier notifier) {
logger.debug("runChild invoked on method: " + method.getName());
RequiresKeyspace rk = method.getAnnotation(RequiresKeyspace.class);
RequiresColumnFamily rcf = method.getAnnotation(RequiresColumnFamily.class);
if ( rk != null ) {
maybeCreateKeyspace(rk, rcf);
} else if ( rcf != null ) {
maybeCreateColumnFamily(rcf);
}
super.runChild(method, notifier);
}
@Override
public void run(RunNotifier notifier) {
startCassandra();
RequiresKeyspace rk = null;
RequiresColumnFamily rcf = null;
for (Annotation ann : getTestClass().getAnnotations() ) {
if ( ann instanceof RequiresKeyspace ) {
rk = (RequiresKeyspace)ann;
} else if ( ann instanceof RequiresColumnFamily ) {
rcf = (RequiresColumnFamily)ann;
}
}
if ( rk != null ) {
maybeCreateKeyspace(rk, rcf);
} else if ( rcf != null ) {
maybeCreateColumnFamily(rcf);
}
super.run(notifier);
}
private void maybeCreateKeyspace(RequiresKeyspace rk, RequiresColumnFamily rcf) {
logger.debug("RequiresKeyspace annotation has keyspace name: {}", rk.ksName());
List<CFMetaData> cfs = extractColumnFamily(rcf);
try {
MigrationManager
.announceNewKeyspace(KSMetaData.newKeyspace(rk.ksName(),
rk.strategy(), KSMetaData.optsWithRF(rk.replication()), false, cfs));
} catch (AlreadyExistsException aee) {
logger.info("using existing Keyspace for " + rk.ksName());
if ( cfs.size() > 0 ) {
maybeTruncateSafely(rcf);
}
} catch (Exception ex) {
throw new RuntimeException("Failure creating keyspace for " + rk.ksName(),ex);
}
}
    private List<CFMetaData> extractColumnFamily(RequiresColumnFamily rcf) {
        List<CFMetaData> cfms = new ArrayList<CFMetaData>();
        if ( rcf != null ) {
            logger.debug("RequiresColumnFamily has name: {} for ks: {}", rcf.cfName(), rcf.ksName());
            try {
cfms.add(new CFMetaData(rcf.ksName(), rcf.cfName(),
ColumnFamilyType.Standard, TypeParser.parse(rcf.comparator()), null));
} catch (Exception ex) {
throw new RuntimeException("unable to create column family for: " + rcf.cfName(), ex);
}
}
return cfms;
}
private void maybeCreateColumnFamily(RequiresColumnFamily rcf) {
try {
CFMetaData cfMetaData;
if ( rcf.isCounter() ) {
cfMetaData = new CFMetaData(rcf.ksName(), rcf.cfName(),
ColumnFamilyType.Standard, TypeParser.parse(rcf.comparator()), null)
.replicateOnWrite(false).defaultValidator(CounterColumnType.instance);
} else {
cfMetaData = new CFMetaData(rcf.ksName(), rcf.cfName(),
ColumnFamilyType.Standard, TypeParser.parse(rcf.comparator()), null);
}
MigrationManager.announceNewColumnFamily(cfMetaData);
} catch(AlreadyExistsException aee) {
logger.info("CF already exists for " + rcf.cfName());
maybeTruncateSafely(rcf);
} catch (Exception ex) {
throw new RuntimeException("Could not create CF for: " + rcf.cfName(), ex);
}
}
private void maybeTruncateSafely(RequiresColumnFamily rcf) {
if ( rcf != null && rcf.truncateExisting() ) {
try {
StorageProxy.truncateBlocking(rcf.ksName(), rcf.cfName());
} catch (Exception ex) {
throw new RuntimeException("Could not truncate column family: " + rcf.cfName(),ex);
}
}
}
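    // Starts the embedded StaashDeamon once per JVM: wipes the temporary cache/data/log
    // directories, launches the daemon on a background thread, waits a few seconds for it to
    // come up, and registers a shutdown hook that stops it when the test JVM exits.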
private void startCassandra() {
if ( staashDaemon != null ) {
return;
}
deleteRecursive(new File("/tmp/staash_cache"));
deleteRecursive(new File ("/tmp/staash_data"));
deleteRecursive(new File ("/tmp/staash_log"));
System.setProperty("cassandra-foreground", "true");
System.setProperty("log4j.defaultInitOverride","true");
System.setProperty("log4j.configuration", "log4j.properties");
System.setProperty("cassandra.ring_delay_ms","1000");
System.setProperty("cassandra.start_rpc","true");
System.setProperty("cassandra.start_native_transport","true");
executor.execute(new Runnable() {
public void run() {
staashDaemon = new StaashDeamon();
staashDaemon.activate();
}
});
try {
TimeUnit.SECONDS.sleep(3);
}
catch (InterruptedException e) {
throw new AssertionError(e);
}
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
try {
logger.error("In shutdownHook");
stopCassandra();
} catch (Exception ex) {
ex.printStackTrace();
}
}
});
}
private void stopCassandra() throws Exception {
if (staashDaemon != null) {
staashDaemon.deactivate();
StorageService.instance.stopClient();
}
executor.shutdown();
executor.shutdownNow();
}
private static boolean deleteRecursive(File path) {
if (!path.exists())
return false;
boolean ret = true;
if (path.isDirectory()){
for (File f : path.listFiles()){
ret = ret && deleteRecursive(f);
}
}
return ret && path.delete();
}
}
| 3,253 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test/core/RequiresColumnFamily.java
|
package com.netflix.staash.test.core;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface RequiresColumnFamily {
String ksName();
String cfName();
boolean isCounter() default false;
String comparator() default "UTF8Type";
String defaultValidator() default "UTF8Type";
String keyValidator() default "UTF8Type";
boolean truncateExisting() default false;
}
| 3,254 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test/core/SampleTest.java
|
package com.netflix.staash.test.core;
import java.util.Map;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.KSMetaData;
import org.apache.cassandra.config.Schema;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(CassandraRunner.class)
@RequiresKeyspace(ksName = "myks")
@RequiresColumnFamily(ksName = "myks", cfName = "uuidtest", comparator = "org.apache.cassandra.db.marshal.UTF8Type", keyValidator = "org.apache.cassandra.db.marshal.UUIDType")
@SuppressWarnings({ "rawtypes", "unchecked" })
@Ignore
public class SampleTest {
@Test
@Ignore
public void mytest2() {
System.out.println("Hello World!");
for (String ks :Schema.instance.getTables()) {
KSMetaData ksm = Schema.instance.getKSMetaData(ks);
Map<String, CFMetaData> cfm = ksm.cfMetaData();
}
}
}
| 3,255 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/test/modules/TestStaashModule.java
|
package com.netflix.staash.test.modules;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.staash.cassandra.discovery.EurekaAstyanaxHostSupplier;
import com.netflix.staash.connection.ConnectionFactory;
import com.netflix.staash.connection.PaasConnectionFactory;
import com.netflix.staash.rest.dao.AstyanaxMetaDaoImpl;
import com.netflix.staash.rest.dao.MetaDao;
import com.netflix.staash.rest.util.MetaConstants;
import com.netflix.staash.service.CacheService;
import com.netflix.staash.service.DataService;
import com.netflix.staash.service.MetaService;
import com.netflix.staash.service.PaasDataService;
import com.netflix.staash.service.PaasMetaService;
public class TestStaashModule extends AbstractModule {
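    // Test wiring: the meta keyspace client talks to a locally running Cassandra
    // (seed localhost:9160, no node discovery) instead of a Eureka-discovered cluster,
    // and the Staash meta/data/cache services are built on top of that client.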
@Provides
@Named("astmetaks")
@Singleton
Keyspace provideKeyspace() {
AstyanaxContext<Keyspace> keyspaceContext = new AstyanaxContext.Builder()
.forCluster("test cluster")
.forKeyspace(MetaConstants.META_KEY_SPACE)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(
NodeDiscoveryType.NONE)
.setConnectionPoolType(
ConnectionPoolType.ROUND_ROBIN)
.setTargetCassandraVersion("1.2")
.setCqlVersion("3.0.0"))
// .withHostSupplier(hs.getSupplier(clustername))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl("localpool"
+ "_" + MetaConstants.META_KEY_SPACE)
.setSocketTimeout(30000)
.setMaxTimeoutWhenExhausted(20000)
.setMaxConnsPerHost(3).setInitConnsPerHost(1)
.setSeeds("localhost"+":"+"9160")) //uncomment for localhost
// .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
keyspaceContext.start();
Keyspace keyspace;
keyspace = keyspaceContext.getClient();
return keyspace;
}
@Provides
@Named("newmetadao")
@Singleton
MetaDao provideCqlMetaDaoNew(@Named("astmetaks") Keyspace keyspace) {
return new AstyanaxMetaDaoImpl(keyspace);
}
@Provides
MetaService providePaasMetaService(@Named("newmetadao") MetaDao metad, CacheService cache) {
ConnectionFactory fac = new PaasConnectionFactory("astyanax", null);
PaasMetaService metasvc = new PaasMetaService(metad, fac, cache);
return metasvc;
}
@Provides
ConnectionFactory provideConnectionFactory() {
return new PaasConnectionFactory("astyanax", null);
}
@Provides
DataService providePaasDataService( MetaService metasvc) {
ConnectionFactory fac = new PaasConnectionFactory("astyanax", null);
PaasDataService datasvc = new PaasDataService(metasvc, fac);
return datasvc;
}
@Provides
CacheService provideCacheService(@Named("newmetadao") MetaDao metad) {
return new CacheService(metad);
}
@Override
protected void configure() {
}
}
| 3,256 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web/tests/TableTest.java
|
package com.netflix.staash.web.tests;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.staash.json.JsonObject;
import com.netflix.staash.service.PaasDataService;
import com.netflix.staash.service.PaasMetaService;
import com.netflix.staash.test.core.CassandraRunner;
import com.netflix.staash.test.modules.TestStaashModule;
@RunWith(CassandraRunner.class)
public class TableTest {
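    // End-to-end table test: registers a storage, database and table through PaasMetaService,
    // writes two rows through PaasDataService, then reads them back and compares the JSON output.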
public static PaasMetaService metasvc;
public static PaasDataService datasvc;
public static final String db = "unitdb1";
public static final String table = "table1";
public static final String tblpay = "{\"name\":\"table1\",\"columns\":\"username,friends,wall,status\",\"primarykey\":\"username\",\"storage\":\"cassandratest\"}";
public static final String insertPay1 = "{\"columns\":\"username,friends,wall,status\",\"values\":\"'rogerfederer','rafa#haas#tommy#sachin#beckham','getting ready for my next#out of wimbledon#out of french','looking fwd to my next match'\"}";
public static final String insertPay2 = "{\"columns\":\"username,friends,wall,status\",\"values\":\"'rafaelnadal','rafa#haas#tommy#sachin#beckham','getting ready for my next#out of wimbledon#out of french','looking fwd to my next match'\"}";
public static final String responseText1 = "{\"1\":{\"username\":\"rogerfederer\",\"friends\":\"rafa#haas#tommy#sachin#beckham\",\"status\":\"looking fwd to my next match\",\"wall\":\"getting ready for my next#out of wimbledon#out of french\"}}";
public static final String responseText2 = "{\"1\":{\"username\":\"rafaelnadal\",\"friends\":\"rafa#haas#tommy#sachin#beckham\",\"status\":\"looking fwd to my next match\",\"wall\":\"getting ready for my next#out of wimbledon#out of french\"}}";
@BeforeClass
public static void setup() {
TestStaashModule pmod = new TestStaashModule();
Injector inj = Guice.createInjector(pmod);
metasvc = inj.getInstance(PaasMetaService.class);
datasvc = inj.getInstance(PaasDataService.class);
StaashTestHelper.createTestStorage(metasvc);
StaashTestHelper.createTestDB(metasvc);
StaashTestHelper.createTestTable(metasvc,tblpay);
System.out.println("Done:");
}
@Test
public void testTableWriteRead() {
datasvc.writeRow(db, table, new JsonObject(insertPay1));
datasvc.writeRow(db, table, new JsonObject(insertPay2));
readRows();
}
    private void readRows() {
        String out = datasvc.listRow(db, table, "username", "rogerfederer");
        Assert.assertEquals("Did Not Get What Was Expected", responseText1, out);
        System.out.println("out= " + out);
        out = datasvc.listRow(db, table, "username", "rafaelnadal");
        Assert.assertEquals("Did Not Get What Was Expected", responseText2, out);
        System.out.println("out= " + out);
    }
}
| 3,257 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web/tests/StaashTestHelper.java
|
/*******************************************************************************
* /*
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* *
******************************************************************************/
package com.netflix.staash.web.tests;
import com.netflix.staash.exception.StorageDoesNotExistException;
import com.netflix.staash.json.JsonObject;
import com.netflix.staash.rest.meta.entity.EntityType;
import com.netflix.staash.service.MetaService;
import com.netflix.staash.service.PaasDataService;
public class StaashTestHelper {
public static final String ServerUrl = "http://localhost:8080";
public static final String CreateDBUrl = "/paas/v1/admin";
public static final String CreateDBPayload = "{name: testdb}";
public static final String ListDBUrl = "/paas/v1/admin";
public static final String CreateStorageUrl = "http://localhost:8080/paas/v1/admin/storage";
    public static final String CreateStoragePayloadCassandra = "{name:testStorageCass, type: cassandra, cluster:local, replicateto:newcluster}";
public static final String ListStorage = "/paas/v1/admin/storage";
public static final String CreateTableUrl = "/paas/v1/admin/testdb";
    public static final String CreateTablePayload = "{name:testtable, columns:user,friends,wall,status, primarykey:user, storage: testStorageCass}";
public static final String ListTablesUrl = "/paas/v1/admin/testdb";
public static final String InsertRowUrl = "/paas/v1/data/testdb/testtable";
public static final String InserRowUrlPayload = "{columns:user,friends,wall,status,values:rogerfed,rafanad,blahblah,blahblahblah}";
public static final String ReadRowUrl = "/paas/v1/data/testdb/testtable/username/rogerfed";
    public static final String CreateTimeSeriesUrl = "/paas/v1/admin/timeseries/testdb";
public static final String CreateTimeSeriesPayload = "{\"name\":\"testseries\",\"msperiodicity\":10000,\"prefix\":\"rogerfed\"}";
public static final String ListTimeSeriesUrl = "/paas/v1/admin/timeseries/testdb";
public static final String CreateEventUrl = "/paas/v1/admin/timeseries/testdb/testseries";
public static final String CreateEventPayload = "{\"time\":1000000,\"event\":\"{tweet: enjoying a cruise}}\"";
public static final String ReadEventUrl = "/paas/v1/data/timeseries/testdb/testseries/time/100000/prefix/rogerfed";
public static final String storagePayload = "{\"name\": \"cassandratest\",\"type\": \"cassandra\",\"cluster\": \"localhost\",\"rf\":\"replication_factor:1\",\"strategy\":\"SimpleStrategy\"}";
public static final String dbPayload = "{\"name\":\"unitdb1\",\"rf\":\"replication_factor:1\",\"strategy\":\"SimpleStrategy\"}";
public static final String db = "unitdb1";
public static final String timeseries = "timeseries1";
public static final String storage = "cassandratest";
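    // The URL/payload constants above describe the equivalent REST endpoints; the helpers
    // below drive MetaService and PaasDataService directly, without going through the HTTP layer.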
public static void createTestStorage(MetaService metasvc) {
try {
metasvc.writeMetaEntity(EntityType.STORAGE, storagePayload);
} catch (StorageDoesNotExistException e) {
e.printStackTrace();
}
}
public static void createTestDB(MetaService metasvc) {
try {
metasvc.writeMetaEntity(EntityType.DB, dbPayload);
} catch (StorageDoesNotExistException e) {
e.printStackTrace();
}
}
public static void createTestTimeSeries(MetaService metasvc, String tblpay) {
JsonObject pload = new JsonObject(tblpay);
pload.putString("db", db);
try {
metasvc.writeMetaEntity(EntityType.SERIES, pload.toString());
} catch (StorageDoesNotExistException e) {
e.printStackTrace();
}
}
public static void writeEvent(PaasDataService datasvc, JsonObject evPayload) {
datasvc.writeEvent(db, timeseries, evPayload);
}
public static void readEvent() {
}
/*
* Table Test
*/
public static void createTestTable(MetaService metasvc, String tblpay) {
JsonObject pload = new JsonObject(tblpay);
pload.putString("db", db);
try {
metasvc.writeMetaEntity(EntityType.TABLE, pload.toString());
} catch (StorageDoesNotExistException e) {
e.printStackTrace();
}
}
}
| 3,258 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web/tests/TestChunking.java
|
/*******************************************************************************
* /*
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* *
******************************************************************************/
package com.netflix.staash.web.tests;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.recipes.storage.CassandraChunkedStorageProvider;
import com.netflix.astyanax.recipes.storage.ChunkedStorage;
import com.netflix.astyanax.recipes.storage.ChunkedStorageProvider;
import com.netflix.astyanax.recipes.storage.ObjectMetadata;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.staash.test.core.RequiresColumnFamily;
import com.netflix.staash.test.core.RequiresKeyspace;
import com.netflix.staash.test.core.CassandraRunner;
@RunWith(CassandraRunner.class)
@RequiresKeyspace(ksName = "myks")
@RequiresColumnFamily(ksName = "myks", cfName = "chunks", comparator = "org.apache.cassandra.db.marshal.UTF8Type", keyValidator = "org.apache.cassandra.db.marshal.UTF8Type")
@SuppressWarnings({ "rawtypes", "unchecked" })
public class TestChunking {
Keyspace keyspace;
private static final String KS = "myks";
private static final String CF = "chunks";
private static final String ENC1 = "SHA-1";
    private static final String ENC2 = "MD5"; // optional, weaker digest than SHA-1
private static final String OBJASC = "testascii";
private static final String OBJBIN = "testbinary";
private static final String FILEASC = "chunktest.html";
private static final String FILEBIN = "test.exe";
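    // Builds a Thrift-based Astyanax keyspace client against the embedded Cassandra
    // started by CassandraRunner (seed 127.0.0.1:9160).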
@Before
public void setup() {
AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
.forCluster("Test Cluster")
.forKeyspace(KS)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl("MyConnectionPool")
.setPort(9160).setMaxConnsPerHost(1)
.setSeeds("127.0.0.1:9160"))
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
context.start();
keyspace = context.getClient();
}
@Test
@Ignore
public void chunktestbinary() throws IOException {
ChunkedStorageProvider provider = new CassandraChunkedStorageProvider(
keyspace, CF);
InputStream fis = null;
InputStream bis = null;
try {
fis = this.getClass().getClassLoader().getResource(FILEBIN)
.openStream();
ObjectMetadata meta = ChunkedStorage
.newWriter(provider, OBJBIN, fis).withChunkSize(0x1000)
.withConcurrencyLevel(8).withTtl(60) // Optional TTL for the
// entire object
.call();
Long writesize = meta.getObjectSize();
// Long readsize = readChunked("myks","chunks","test1");
byte[] written = new byte[writesize.intValue()];
bis =
this.getClass().getClassLoader().getResource(FILEBIN).openStream();
int i1 = ((BufferedInputStream)bis).read(written, 0,
writesize.intValue());
System.out.println("length read = "+i1);
byte[] read = readChunked(KS, CF, OBJBIN);
boolean cmp = compareMD5(written, read);
            Assert.assertTrue(cmp);
Thread.sleep(1000);
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
} finally {
if (fis != null)
fis.close();
if (bis!=null)
bis.close();
}
}
@Test
public void chunktestascii() throws IOException {
ChunkedStorageProvider provider = new CassandraChunkedStorageProvider(
keyspace, CF);
InputStream fis = null;
InputStream bis = null;
try {
fis = this.getClass().getClassLoader().getResource(FILEASC)
.openStream();
ObjectMetadata meta = ChunkedStorage
.newWriter(provider, OBJASC, fis).withChunkSize(0x1000)
.withConcurrencyLevel(8).withTtl(60) // Optional TTL for the
// entire object
.call();
Long writesize = meta.getObjectSize();
// Long readsize = readChunked("myks","chunks","test1");
byte[] written = new byte[writesize.intValue()];
bis =
this.getClass().getClassLoader().getResource("chunktest.html").openStream();
int i1 = ((BufferedInputStream)bis).read(written, 0,
writesize.intValue());
System.out.println("length read = "+i1);
byte[] read = readChunked(KS, CF, OBJASC);
boolean cmp = compareMD5(written, read);
            Assert.assertTrue(cmp);
Thread.sleep(1000);
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
} finally {
if (fis != null)
fis.close();
if (bis!=null)
bis.close();
}
}
public boolean compareMD5(byte[] written, byte[] read) {
try {
MessageDigest md = MessageDigest.getInstance(ENC1);
byte[] wdigest = md.digest(written);
byte[] rdigest = md.digest(read);
return Arrays.equals(wdigest, rdigest);
} catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
}
}
public byte[] readChunked(String db, String table, String objName)
throws Exception {
ChunkedStorageProvider provider = new CassandraChunkedStorageProvider(
keyspace, table);
ObjectMetadata meta = ChunkedStorage.newInfoReader(provider, objName)
.call();
ByteArrayOutputStream os = new ByteArrayOutputStream(meta
.getObjectSize().intValue());
meta = ChunkedStorage.newReader(provider, objName, os)
.withBatchSize(10).call();
return (os != null) ? os.toByteArray() : new byte[0];
}
}
| 3,259 |
0 |
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web
|
Create_ds/staash/staash-web/src/test/java/com/netflix/staash/web/tests/TimeSeriesTest.java
|
package com.netflix.staash.web.tests;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.staash.json.JsonObject;
import com.netflix.staash.service.PaasDataService;
import com.netflix.staash.service.PaasMetaService;
import com.netflix.staash.test.modules.TestStaashModule;
import com.netflix.staash.test.core.CassandraRunner;
//@RequiresKeyspace(ksName = "paasmetaks")
//@RequiresColumnFamily(ksName = "paasmetaks", cfName = "metacf", comparator = "org.apache.cassandra.db.marshal.UTF8Type", keyValidator = "org.apache.cassandra.db.marshal.UTF8Type")
//@SuppressWarnings({ "rawtypes", "unchecked" })
@RunWith(CassandraRunner.class)
public class TimeSeriesTest {
public static PaasMetaService metasvc;
public static PaasDataService datasvc;
public static final String db = "unitdb1";
public static final String timeseries = "testtimeseries1";
public static String timeseriespay = "{\"name\":\"timeseries1\",\"periodicity\":\"10000\",\"prefix\":\"server1\",\"storage\":\"cassandratest\"}";
@BeforeClass
public static void setup() {
TestStaashModule pmod = new TestStaashModule();
Injector inj = Guice.createInjector(pmod);
metasvc = inj.getInstance(PaasMetaService.class);
datasvc = inj.getInstance(PaasDataService.class);
StaashTestHelper.createTestStorage(metasvc);
StaashTestHelper.createTestDB(metasvc);
StaashTestHelper.createTestTimeSeries(metasvc, timeseriespay);
System.out.println("Done:");
}
@Test
public void testTimeseriesWriteRead() {
String payload1="{\"timestamp\":11000,\"event\":\"hi 11k event\",\"prefix\":\"source1\"}";
String payload2="{\"timestamp\":21000,\"event\":\"hi 21k event\",\"prefix\":\"source1\"}";
String payload3="{\"timestamp\":121000,\"event\":\"hi 121k event\",\"prefix\":\"source2\"}";
StaashTestHelper.writeEvent(datasvc, new JsonObject(payload1));
StaashTestHelper.writeEvent(datasvc, new JsonObject(payload2));
StaashTestHelper.writeEvent(datasvc, new JsonObject(payload3));
readTimeSeries();
}
    private void readTimeSeries() {
        String db = "unitdb1";
        String table = "timeseries1";
        // use JUnit assertions: the plain 'assert' keyword is a no-op unless the JVM runs with -ea
        String out = datasvc.readEvent(db, table, "source2", "121000");
        Assert.assertEquals("{\"1 Jan 1970 00:02:01 GMT\":\"hi 121k event\"}", out);
        System.out.println("out= " + out);
        out = datasvc.readEvent(db, table, "source1", "21000");
        Assert.assertEquals("{\"1 Jan 1970 00:00:21 GMT\":\"hi 21k event\"}", out);
        System.out.println("out= " + out);
        out = datasvc.readEvent(db, table, "source1", "11000");
        Assert.assertEquals("{\"1 Jan 1970 00:00:11 GMT\":\"hi 11k event\"}", out);
        System.out.println("out= " + out);
    }
}
| 3,260 |
0 |
Create_ds/staash/staash-web/src/main/java/com/netflix/staash
|
Create_ds/staash/staash-web/src/main/java/com/netflix/staash/web/GuiceServletConfig.java
|
/*******************************************************************************
* /*
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* *
******************************************************************************/
package com.netflix.staash.web;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.staash.cassandra.discovery.EurekaModule;
import com.netflix.staash.rest.modules.PaasPropertiesModule;
import com.netflix.staash.rest.resources.StaashAdminResourceImpl;
import com.netflix.staash.rest.resources.StaashDataResourceImpl;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
public class GuiceServletConfig extends GuiceServletContextListener {
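    // Builds the Governator lifecycle injector for the web app: Eureka discovery, Staash
    // property bindings, and a Jersey servlet module that routes every request path to the
    // admin and data REST resources through GuiceContainer.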
@Override
protected Injector getInjector() {
return LifecycleInjector.builder()
.withModules(
new EurekaModule(),
new PaasPropertiesModule(),
new JerseyServletModule() {
@Override
protected void configureServlets() {
bind(GuiceContainer.class).asEagerSingleton();
bind(StaashAdminResourceImpl.class);
bind(StaashDataResourceImpl.class);
serve("/*").with(GuiceContainer.class);
}
}
)
.createInjector();
}
}
| 3,261 |
0 |
Create_ds/staash/staash-astyanax/src/test/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/test/java/com/netflix/paas/cassandra/provider/AstyanaxThriftTest.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import com.google.inject.Injector;
import com.google.inject.Scopes;
import com.google.inject.name.Names;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.governator.lifecycle.LifecycleManager;
import com.netflix.paas.PaasBootstrap;
import com.netflix.paas.PaasModule;
import com.netflix.paas.cassandra.CassandraPaasModule;
import com.netflix.paas.cassandra.PaasCassandraBootstrap;
import com.netflix.paas.cassandra.admin.CassandraClusterAdminResource;
import com.netflix.paas.cassandra.admin.CassandraClusterAdminResourceFactory;
import com.netflix.paas.cassandra.discovery.ClusterDiscoveryService;
import com.netflix.paas.cassandra.discovery.LocalClusterDiscoveryService;
import com.netflix.paas.cassandra.entity.ColumnFamilyEntity;
import com.netflix.paas.cassandra.entity.KeyspaceEntity;
import com.netflix.paas.cassandra.keys.ClusterKey;
import com.netflix.paas.cassandra.keys.KeyspaceKey;
import com.netflix.paas.cassandra.provider.KeyspaceClientProvider;
import com.netflix.paas.cassandra.resources.AstyanaxThriftDataTableResource;
import com.netflix.paas.data.QueryResult;
import com.netflix.paas.data.RowData;
import com.netflix.paas.data.SchemalessRows;
import com.netflix.paas.resources.TableDataResource;
public class AstyanaxThriftTest {
    private static final Logger LOG = LoggerFactory.getLogger(AstyanaxThriftTest.class);
private static Injector injector;
private static final String CLUSTER_NAME = "local";
private static final String KEYSPACE_NAME = "Keyspace1";
private static final String CF_NAME = "ColumnFamily1";
private static final String LOCAL_DISCOVERY_TYPE = "local";
// @BeforeClass
// @Ignore
// public static void initialize() throws Exception {
//
// System.setProperty("com.netflix.paas.title", "HelloPaas");
// System.setProperty("com.netflix.paas.cassandra.dcs", "us-east");
// System.setProperty("com.netflix.paas.schema.configuration.type", "astyanax");
// System.setProperty("com.netflix.paas.schema.configuration.discovery", "local");
// System.setProperty("com.netflix.paas.schema.configuration.cluster", "cass_sandbox");
// System.setProperty("com.netflix.paas.schema.configuration.keyspace", "paas");
// System.setProperty("com.netflix.paas.schema.configuration.strategy_options.replication_factor", "1");
// System.setProperty("com.netflix.paas.schema.configuration.strategy_class", "SimpleStrategy");
// System.setProperty("com.netflix.paas.schema.audit", "configuration");
//
// SingletonEmbeddedCassandra.getInstance();
//
// // Create the injector
// injector = LifecycleInjector.builder()
// .withModules(
// new PaasModule(),
// new CassandraPaasModule(),
// new AbstractModule() {
// @Override
// protected void configure() {
// bind(String.class).annotatedWith(Names.named("groupName")).toInstance("UnitTest1");
// bind(ClusterDiscoveryService.class).to(LocalClusterDiscoveryService.class).in(Scopes.SINGLETON);
//
// bind(PaasBootstrap.class).asEagerSingleton();
// bind(PaasCassandraBootstrap.class).asEagerSingleton();
// }
// })
// .createInjector();
//
// LifecycleManager manager = injector.getInstance(LifecycleManager.class);
// manager.start();
//
// CassandraClusterAdminResourceFactory factory = injector.getInstance(CassandraClusterAdminResourceFactory.class);
//
// // Create Keyspace
// CassandraClusterAdminResource admin = factory.get(new ClusterKey(CLUSTER_NAME, "local"));
// admin.createKeyspace(KeyspaceEntity.builder()
// .withName(KEYSPACE_NAME)
// .withOptions(ImmutableMap.<String, String>builder()
// .put("strategy_class", "SimpleStrategy")
// .put("strategy_options.replication_factor", "1")
// .build())
// .build());
//
// // Create column family
// admin.createColumnFamily(KEYSPACE_NAME, ColumnFamilyEntity.builder()
// .withName(CF_NAME)
// .withOptions(ImmutableMap.<String, String>builder()
// .put("comparator_type", "LongType")
// .put("key_validation_class", "LongType")
// .build())
// .build());
//
// // Create DB from cluster
//
// }
@AfterClass
public static void shutdown() {
}
@Test
@Ignore
public void testReadData() throws Exception {
KeyspaceClientProvider clientProvider = injector.getInstance(KeyspaceClientProvider.class);
Keyspace keyspace = clientProvider.acquireKeyspace(new KeyspaceKey(new ClusterKey(LOCAL_DISCOVERY_TYPE, CLUSTER_NAME), KEYSPACE_NAME));
//
// // Create the keyspace and column family
// keyspace.createKeyspace(new Properties());
// Properties props = new Properties();
// props.setProperty("name", CF_NAME);
// props.setProperty("comparator_type", "LongType");
// props.setProperty("key_validation_class", "LongType");
// keyspace.createColumnFamily(props);
//
// // Add some data
TableDataResource thriftDataTableResource = new AstyanaxThriftDataTableResource(keyspace, CF_NAME);
String rowKey = "100";
SchemalessRows.Builder builder = SchemalessRows.builder();
builder.addRow(rowKey, ImmutableMap.<String, String>builder().put("1", "11").put("2", "22").build());
RowData dr = new RowData();
dr.setSrows(builder.build());
// thriftDataTableResource.updateRow(rowKey, dr);
QueryResult result;
result = thriftDataTableResource.readRow(rowKey, 1, null, null, false);
// Assert.assertEquals(1, Iterables.getFirst(result.getSrows().getRows(), null).getColumns().size());
LOG.info(result.toString());
result = thriftDataTableResource.readRow(rowKey, 10, null, null, false);
// Assert.assertEquals(2, Iterables.getFirst(result.getRows(), null).getColumns().size());
LOG.info(result.toString());
}
@Test
@Ignore
public void testAdmin() {
}
}
| 3,262 |
0 |
Create_ds/staash/staash-astyanax/src/test/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/test/java/com/netflix/paas/cassandra/provider/LocalClusterClientProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import com.netflix.astyanax.Cluster;
import com.netflix.paas.cassandra.keys.ClusterKey;
public class LocalClusterClientProvider implements ClusterClientProvider {
@Override
public Cluster acquireCluster(ClusterKey clusterName) {
return null;
}
@Override
public void releaseCluster(ClusterKey clusterName) {
}
}
| 3,263 |
0 |
Create_ds/staash/staash-astyanax/src/test/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/test/java/com/netflix/paas/cassandra/provider/SingletonEmbeddedCassandra.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import com.netflix.astyanax.test.EmbeddedCassandra;
public class SingletonEmbeddedCassandra {
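    // Lazily initialised singleton (initialization-on-demand holder idiom); the embedded
    // Cassandra instance is created and started the first time getInstance() is called.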
private final EmbeddedCassandra cassandra;
private static class Holder {
private final static SingletonEmbeddedCassandra instance = new SingletonEmbeddedCassandra();
}
public static SingletonEmbeddedCassandra getInstance() {
return Holder.instance;
}
public SingletonEmbeddedCassandra() {
try {
cassandra = new EmbeddedCassandra();
cassandra.start();
} catch (Exception e) {
throw new RuntimeException("Failed to start embedded cassandra", e);
}
}
public void finalize() {
try {
cassandra.stop();
}
catch (Exception e) {
}
}
}
| 3,264 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/meta/CqlMetaDaoImpl.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.meta;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.ColumnSpecification;
import org.apache.cassandra.thrift.CqlResult;
import org.apache.cassandra.thrift.CqlRow;
import org.apache.cassandra.transport.messages.ResultMessage;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.ColumnDefinitions.Definition;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.exceptions.AlreadyExistsException;
import com.datastax.driver.core.exceptions.DriverException;
import com.google.inject.Inject;
import com.netflix.paas.json.JsonObject;
import com.netflix.paas.meta.dao.MetaDao;
import com.netflix.paas.meta.entity.Entity;
import com.netflix.paas.meta.entity.PaasDBEntity;
import com.netflix.paas.meta.entity.PaasTableEntity;
import com.netflix.paas.util.Pair;
import static com.datastax.driver.core.querybuilder.QueryBuilder.*;
public class CqlMetaDaoImpl implements MetaDao{
private Cluster cluster;
private Session session;
private static boolean schemaCreated = false;
private static final String metaks = "paasmetaks";
private static final String metacf = "metacf";
@Inject
public CqlMetaDaoImpl(Cluster cluster) {
this.cluster = cluster;
this.session = this.cluster.connect();
maybeCreateMetaSchema();
}
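    /**
     * Persists the entity's metadata row into paasmetaks.metacf; for table entities it also
     * ensures the target keyspace exists and then creates the CQL table itself.
     */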
@Override
public void writeMetaEntity(Entity entity) {
//if (entity instanceof PaasDBEntity) {
//implies its a create request
//insert into the meta some values for this dbentity
//wait for creation of the actual keyspace
try {
session.execute(String.format(PaasUtils.INSERT_FORMAT, metaks+"."+metacf, entity.getRowKey(),entity.getName(),entity.getPayLoad()));
} catch (AlreadyExistsException e) {
// It's ok, ignore
}
//}
if (entity instanceof PaasTableEntity) {
//first create/check if schema db exists
PaasTableEntity tableEnt = (PaasTableEntity)entity;
try {
String schemaName = tableEnt.getSchemaName();
session.execute(String.format(PaasUtils.CREATE_KEYSPACE_SIMPLE_FORMAT, schemaName, 1));
} catch (AlreadyExistsException e) {
// It's ok, ignore
}
//if schema/db already exists now create the table
String query = BuildQuery(tableEnt);
Print(query);
session.execute(query);
//List<String> primaryKeys = entity.getPrimaryKey();
}
}
public void writeRow(String db, String table,JsonObject rowObj) {
String query = BuildRowInsertQuery(db, table, rowObj);
Print(query);
session.execute(query);
}
private String BuildRowInsertQuery(String db, String table, JsonObject rowObj) {
        // build "INSERT INTO db.table(columns) VALUES(values);" from the row's columns/values strings
String columns = rowObj.getString("columns");
String values = rowObj.getString("values");
return "INSERT INTO"+" "+db+"."+table+"("+columns+")"+" VALUES("+values+");";
}
private void Print(String str) {
System.out.println(str);
}
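    // Assembles a CREATE TABLE statement from the entity's column name/type pairs and its
    // primary key definition.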
private String BuildQuery(PaasTableEntity tableEnt) {
String schema = tableEnt.getSchemaName();
String tableName = tableEnt.getName();
List<Pair<String, String>> columns = tableEnt.getColumns();
String primary = tableEnt.getPrimarykey();
String colStrs = "";
for (Pair<String, String> colPair:columns) {
colStrs = colStrs+colPair.getRight()+" "+colPair.getLeft()+", ";
}
String primarykeys = tableEnt.getPrimarykey();
String PRIMARYSTR = "PRIMARY KEY("+primarykeys+")";
return "CREATE TABLE "+schema+"."+tableName+" ("+colStrs+" "+PRIMARYSTR+");";
}
public void maybeCreateMetaSchema() {
try {
if (schemaCreated)
return;
try {
session.execute(String.format(PaasUtils.CREATE_KEYSPACE_SIMPLE_FORMAT, metaks, 1));
} catch (AlreadyExistsException e) {
// It's ok, ignore
}
session.execute("USE " + metaks);
for (String tableDef : getTableDefinitions()) {
try {
session.execute(tableDef);
} catch (AlreadyExistsException e) {
// It's ok, ignore
}
}
schemaCreated = true;
} catch (DriverException e) {
throw e;
}
}
protected Collection<String> getTableDefinitions() {
// String sparse = "CREATE TABLE sparse (\n"
// + " k text,\n"
// + " c1 int,\n"
// + " c2 float,\n"
// + " l list<text>,\n"
// + " v int,\n"
// + " PRIMARY KEY (k, c1, c2)\n"
// + ");";
//
// String st = "CREATE TABLE static (\n"
// + " k text,\n"
// + " i int,\n"
// + " m map<text, timeuuid>,\n"
// + " v int,\n"
// + " PRIMARY KEY (k)\n"
// + ");";
//
// String compactStatic = "CREATE TABLE compact_static (\n"
// + " k text,\n"
// + " i int,\n"
// + " t timeuuid,\n"
// + " v int,\n"
// + " PRIMARY KEY (k)\n"
// + ") WITH COMPACT STORAGE;";
//similar to old paas.db table, contains only the metadata about the paas entities
String metaDynamic = "CREATE TABLE metacf (\n"
+ " key text,\n"
+ " column1 text,\n"
+ " value text,\n"
+ " PRIMARY KEY (key, column1)\n"
+ ") WITH COMPACT STORAGE;";
// String compactComposite = "CREATE TABLE compact_composite (\n"
// + " k text,\n"
// + " c1 int,\n"
// + " c2 float,\n"
// + " c3 double,\n"
// + " v timeuuid,\n"
// + " PRIMARY KEY (k, c1, c2, c3)\n"
// + ") WITH COMPACT STORAGE;";
// String withOptions = "CREATE TABLE with_options (\n"
// + " k text,\n"
// + " i int,\n"
// + " PRIMARY KEY (k)\n"
// + ") WITH read_repair_chance = 0.5\n"
// + " AND dclocal_read_repair_chance = 0.6\n"
// + " AND replicate_on_write = true\n"
// + " AND gc_grace_seconds = 42\n"
// + " AND bloom_filter_fp_chance = 0.01\n"
// + " AND caching = ALL\n"
// + " AND comment = 'My awesome table'\n"
// + " AND compaction = { 'class' : 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy', 'sstable_size_in_mb' : 15 }\n"
// + " AND compression = { 'sstable_compression' : 'org.apache.cassandra.io.compress.SnappyCompressor', 'chunk_length_kb' : 128 };";
List<String> allDefs = new ArrayList<String>();
allDefs.add(metaDynamic);
return allDefs;
}
@Override
public Entity readMetaEntity(String rowKey) {
// TODO Auto-generated method stub
return null;
}
@Override
public String listRow(String db, String table, String keycol, String key) {
        // SELECT * FROM db.table WHERE keycol = key, returned in the JSON shape produced by convertResultSet
String query = select().all().from(db, table).where(eq(keycol,key)).getQueryString();
ResultSet rs = session.execute(query);
return convertResultSet(rs);
}
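    // Flattens a single-row ResultSet into a {columns, values} JSON object. The hard-coded
    // substring offsets assume the driver's toString() output is formatted as "Columns[...]"
    // and "Row[...]", which is what this code relies on.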
private String convertResultSet(ResultSet rs) {
String colStr = "";
String rowStr = "";
JsonObject response = new JsonObject();
List<Row> rows = rs.all();
if (!rows.isEmpty() && rows.size()==1) {
rowStr = rows.get(0).toString();
}
ColumnDefinitions colDefs = rs.getColumnDefinitions();
colStr = colDefs.toString();
response.putString("columns", colStr.substring(8,colStr.length()-1));
response.putString("values", rowStr.substring(4,rowStr.length()-1));
return response.toString();
// for (Row ro:rows) {
// Print(ro.toString());
//// ro.getColumnDefinitions()
// }
// return null;
// if (rm.kind == ResultMessage.Kind.ROWS) {
// //ToDo maybe processInternal
// boolean bSwitch = true;
// if (bSwitch) {
// ResultMessage.Rows cqlRows = (ResultMessage.Rows) rm;
// List<ColumnSpecification> columnSpecs = cqlRows.result.metadata.names;
//
// for (List<ByteBuffer> row : cqlRows.result.rows) {
// Map<String,Object> map = new HashMap<String,Object>();
// int i = 0;
// for (ByteBuffer bytes : row) {
// ColumnSpecification specs = columnSpecs.get(i++);
// if (specs.name!=null && specs.type!=null && bytes!=null && bytes.hasRemaining()) {
// System.out.println("name = "+specs.name.toString()+" ,type= "+specs.type.compose(bytes));
// map.put(specs.name.toString(), specs.type.compose(bytes));
// }
// }
// returnRows.add(map);
// }
// } else {
// boolean convert = true;;
// CqlResult result = rm.toThriftResult();
// List<CqlRow> rows = result.getRows();
// for (CqlRow row: rows) {
// List<org.apache.cassandra.thrift.Column> columns = row.getColumns();
// for (org.apache.cassandra.thrift.Column c: columns){
// HashMap<String,Object> m = new HashMap<String,Object>();
// if (convert) {
// m.put("name" , TypeHelper.getCqlTyped(result.schema.name_types.get(c.name), c.name) );
// m.put("value" , TypeHelper.getCqlTyped(result.schema.name_types.get(c.name), c.value) );
// } else {
// m.put("value", TypeHelper.getBytes(c.value));
// m.put("name", TypeHelper.getBytes(c.name));
// }
// returnRows.add(m);
// }
// }
// }
// }
// JsonObject response = new JsonObject();
// JsonArray array = new JsonArray();
// for (Map<String,Object> m : returnRows) {
// array.add(new JsonObject(m));
// }
// response.putString(Long.toString(counter.incrementAndGet()), "OK");
// response.putArray(Long.toString(counter.incrementAndGet()), array);
// String testQry = "CREATE KEYSPACE testdb WITH REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor': 1};";
//// create("testdb",1);
// return response.toString();
// return null;
// }
}
}
| 3,265 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/meta/PaasUtils.java
|
/*
* Copyright (C) 2012 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.paas.dao.meta;
import java.math.*;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.*;
/**
* A number of static fields/methods handy for tests.
*/
public abstract class PaasUtils {
private static final Logger logger = LoggerFactory.getLogger(PaasUtils.class);
public static final String CREATE_KEYSPACE_SIMPLE_FORMAT = "CREATE KEYSPACE %s WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor' : %d }";
public static final String CREATE_KEYSPACE_GENERIC_FORMAT = "CREATE KEYSPACE %s WITH replication = { 'class' : '%s', %s }";
public static final String SIMPLE_KEYSPACE = "ks";
public static final String SIMPLE_TABLE = "test";
public static final String CREATE_TABLE_SIMPLE_FORMAT = "CREATE TABLE %s (k text PRIMARY KEY, t text, i int, f float)";
public static final String INSERT_FORMAT = "INSERT INTO %s (key, column1, value) VALUES ('%s', '%s', '%s')";
public static final String SELECT_ALL_FORMAT = "SELECT * FROM %s";
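    // Binds 'value' into the BoundStatement using the setter that matches the CQL DataType;
    // counters are intentionally skipped and collection values are bound unchecked.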
public static BoundStatement setBoundValue(BoundStatement bs, String name, DataType type, Object value) {
switch (type.getName()) {
case ASCII:
bs.setString(name, (String)value);
break;
case BIGINT:
bs.setLong(name, (Long)value);
break;
case BLOB:
bs.setBytes(name, (ByteBuffer)value);
break;
case BOOLEAN:
bs.setBool(name, (Boolean)value);
break;
case COUNTER:
// Just a no-op, we shouldn't handle counters the same way than other types
break;
case DECIMAL:
bs.setDecimal(name, (BigDecimal)value);
break;
case DOUBLE:
bs.setDouble(name, (Double)value);
break;
case FLOAT:
bs.setFloat(name, (Float)value);
break;
case INET:
bs.setInet(name, (InetAddress)value);
break;
case INT:
bs.setInt(name, (Integer)value);
break;
case TEXT:
bs.setString(name, (String)value);
break;
case TIMESTAMP:
bs.setDate(name, (Date)value);
break;
case UUID:
bs.setUUID(name, (UUID)value);
break;
case VARCHAR:
bs.setString(name, (String)value);
break;
case VARINT:
bs.setVarint(name, (BigInteger)value);
break;
case TIMEUUID:
bs.setUUID(name, (UUID)value);
break;
case LIST:
bs.setList(name, (List)value);
break;
case SET:
bs.setSet(name, (Set)value);
break;
case MAP:
bs.setMap(name, (Map)value);
break;
default:
throw new RuntimeException("Missing handling of " + type);
}
return bs;
}
public static Object getValue(Row row, String name, DataType type) {
switch (type.getName()) {
case ASCII:
return row.getString(name);
case BIGINT:
return row.getLong(name);
case BLOB:
return row.getBytes(name);
case BOOLEAN:
return row.getBool(name);
case COUNTER:
return row.getLong(name);
case DECIMAL:
return row.getDecimal(name);
case DOUBLE:
return row.getDouble(name);
case FLOAT:
return row.getFloat(name);
case INET:
return row.getInet(name);
case INT:
return row.getInt(name);
case TEXT:
return row.getString(name);
case TIMESTAMP:
return row.getDate(name);
case UUID:
return row.getUUID(name);
case VARCHAR:
return row.getString(name);
case VARINT:
return row.getVarint(name);
case TIMEUUID:
return row.getUUID(name);
case LIST:
return row.getList(name, classOf(type.getTypeArguments().get(0)));
case SET:
return row.getSet(name, classOf(type.getTypeArguments().get(0)));
case MAP:
return row.getMap(name, classOf(type.getTypeArguments().get(0)), classOf(type.getTypeArguments().get(1)));
}
throw new RuntimeException("Missing handling of " + type);
}
private static Class classOf(DataType type) {
assert !type.isCollection();
switch (type.getName()) {
case ASCII:
case TEXT:
case VARCHAR:
return String.class;
case BIGINT:
case COUNTER:
return Long.class;
case BLOB:
return ByteBuffer.class;
case BOOLEAN:
return Boolean.class;
case DECIMAL:
return BigDecimal.class;
case DOUBLE:
return Double.class;
case FLOAT:
return Float.class;
case INET:
return InetAddress.class;
case INT:
return Integer.class;
case TIMESTAMP:
return Date.class;
case UUID:
case TIMEUUID:
return UUID.class;
case VARINT:
return BigInteger.class;
}
throw new RuntimeException("Missing handling of " + type);
}
// Always return the "same" value for each type
public static Object getFixedValue(final DataType type) {
try {
switch (type.getName()) {
case ASCII:
return "An ascii string";
case BIGINT:
return 42L;
case BLOB:
return ByteBuffer.wrap(new byte[]{ (byte)4, (byte)12, (byte)1 });
case BOOLEAN:
return true;
case COUNTER:
throw new UnsupportedOperationException("Cannot 'getSomeValue' for counters");
case DECIMAL:
return new BigDecimal("3.1415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679");
case DOUBLE:
return 3.142519;
case FLOAT:
return 3.142519f;
case INET:
return InetAddress.getByAddress(new byte[]{(byte)127, (byte)0, (byte)0, (byte)1});
case INT:
return 24;
case TEXT:
return "A text string";
case TIMESTAMP:
return new Date(1352288289L);
case UUID:
return UUID.fromString("087E9967-CCDC-4A9B-9036-05930140A41B");
case VARCHAR:
return "A varchar string";
case VARINT:
return new BigInteger("123456789012345678901234567890");
case TIMEUUID:
return UUID.fromString("FE2B4360-28C6-11E2-81C1-0800200C9A66");
case LIST:
return new ArrayList(){{ add(getFixedValue(type.getTypeArguments().get(0))); }};
case SET:
return new HashSet(){{ add(getFixedValue(type.getTypeArguments().get(0))); }};
case MAP:
return new HashMap(){{ put(getFixedValue(type.getTypeArguments().get(0)), getFixedValue(type.getTypeArguments().get(1))); }};
}
} catch (Exception e) {
throw new RuntimeException(e);
}
throw new RuntimeException("Missing handling of " + type);
}
// Always return the "same" value for each type
public static Object getFixedValue2(final DataType type) {
try {
switch (type.getName()) {
case ASCII:
return "A different ascii string";
case BIGINT:
return Long.MAX_VALUE;
case BLOB:
ByteBuffer bb = ByteBuffer.allocate(64);
bb.putInt(0xCAFE);
bb.putShort((short) 3);
bb.putShort((short) 45);
return bb;
case BOOLEAN:
return false;
case COUNTER:
throw new UnsupportedOperationException("Cannot 'getSomeValue' for counters");
case DECIMAL:
return new BigDecimal("12.3E+7");
case DOUBLE:
return Double.POSITIVE_INFINITY;
case FLOAT:
return Float.POSITIVE_INFINITY;
case INET:
return InetAddress.getByName("123.123.123.123");
case INT:
return Integer.MAX_VALUE;
case TEXT:
return "r??sum??";
case TIMESTAMP:
return new Date(872835240000L);
case UUID:
return UUID.fromString("067e6162-3b6f-4ae2-a171-2470b63dff00");
case VARCHAR:
return "A different varchar r??sum??";
case VARINT:
return new BigInteger(Integer.toString(Integer.MAX_VALUE) + "000");
case TIMEUUID:
return UUID.fromString("FE2B4360-28C6-11E2-81C1-0800200C9A66");
case LIST:
return new ArrayList(){{ add(getFixedValue2(type.getTypeArguments().get(0))); }};
case SET:
return new HashSet(){{ add(getFixedValue2(type.getTypeArguments().get(0))); }};
case MAP:
return new HashMap(){{ put(getFixedValue2(type.getTypeArguments().get(0)), getFixedValue2(type.getTypeArguments().get(1))); }};
}
} catch (Exception e) {
throw new RuntimeException(e);
}
throw new RuntimeException("Missing handling of " + type);
}
// Wait for a node to be up and running
// This is used because there is some delay between when a node has been
// added through ccm and when it's actually available for querying
public static void waitFor(String node, Cluster cluster) {
waitFor(node, cluster, 20, false, false);
}
public static void waitFor(String node, Cluster cluster, int maxTry) {
waitFor(node, cluster, maxTry, false, false);
}
public static void waitForDown(String node, Cluster cluster) {
waitFor(node, cluster, 20, true, false);
}
public static void waitForDownWithWait(String node, Cluster cluster, int waitTime) {
waitFor(node, cluster, 20, true, false);
// FIXME: Once stop() works, remove this line
try {
Thread.sleep(waitTime * 1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public static void waitForDown(String node, Cluster cluster, int maxTry) {
waitFor(node, cluster, maxTry, true, false);
}
public static void waitForDecommission(String node, Cluster cluster) {
waitFor(node, cluster, 20, true, true);
}
public static void waitForDecommission(String node, Cluster cluster, int maxTry) {
waitFor(node, cluster, maxTry, true, true);
}
private static void waitFor(String node, Cluster cluster, int maxTry, boolean waitForDead, boolean waitForOut) {
if (waitForDead)
logger.info("Waiting for stopped node: " + node);
else if (waitForOut)
logger.info("Waiting for decommissioned node: " + node);
else
logger.info("Waiting for upcoming node: " + node);
// In the case where we've killed the last node in the cluster, if we haven't
// tried doing an actual query, the driver won't realize that last node is dead until
// keep-alive kicks in, but that's a fairly long time. So we cheat and force the
// detection by triggering a request.
// if (waitForDead || waitForOut)
// cluster.manager.submitSchemaRefresh(null, null);
InetAddress address;
try {
address = InetAddress.getByName(node);
} catch (Exception e) {
// That's a problem but that's not *our* problem
return;
}
Metadata metadata = cluster.getMetadata();
for (int i = 0; i < maxTry; ++i) {
for (Host host : metadata.getAllHosts()) {
if (host.getAddress().equals(address) && testHost(host, waitForDead))
return;
}
try { Thread.sleep(1000); } catch (Exception e) {}
}
for (Host host : metadata.getAllHosts()) {
if (host.getAddress().equals(address)) {
if (testHost(host, waitForDead)) {
return;
} else {
// logging it because this gives us the timestamp of when this happens
logger.info(node + " is not " + (waitForDead ? "DOWN" : "UP") + " after " + maxTry + "s");
throw new IllegalStateException(node + " is not " + (waitForDead ? "DOWN" : "UP") + " after " + maxTry + "s");
}
}
}
if (waitForOut){
return;
} else {
logger.info(node + " is not part of the cluster after " + maxTry + "s");
throw new IllegalStateException(node + " is not part of the cluster after " + maxTry + "s");
}
}
private static boolean testHost(Host host, boolean testForDown) {
return testForDown ? !host.getMonitor().isUp() : host.getMonitor().isUp();
}
}
| 3,266 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/AstyanaxDaoSchemaProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Properties;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.apache.commons.configuration.AbstractConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.exceptions.BadRequestException;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.paas.cassandra.provider.KeyspaceClientProvider;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoSchemaProvider;
import com.netflix.paas.dao.DaoSchema;
import com.netflix.paas.exceptions.NotFoundException;
/**
* Astyanax based Dao factory for persisting PAAS state
*
* @author elandau
*
*/
public class AstyanaxDaoSchemaProvider implements DaoSchemaProvider {
private final Logger LOG = LoggerFactory.getLogger(AstyanaxDaoSchemaProvider.class);
private final static String CONFIG_PREFIX_FORMAT = "com.netflix.paas.schema.%s";
private final KeyspaceClientProvider keyspaceProvider;
private final Map<String, DaoSchema> schemas = Maps.newHashMap();
private final AbstractConfiguration configuration;
public class AstyanaxDaoSchema implements DaoSchema {
private final IdentityHashMap<Class<?>, Dao<?>> daos = Maps.newIdentityHashMap();
private final Keyspace keyspace;
private final String schemaName;
public AstyanaxDaoSchema(String schemaName, Keyspace keyspace) {
this.keyspace = keyspace;
this.schemaName = schemaName;
Configuration config = configuration.subset(String.format(CONFIG_PREFIX_FORMAT, schemaName.toLowerCase()));
if (config.getBoolean("autocreate", false)) {
try {
createSchema();
}
catch (Exception e) {
LOG.error("Error creating column keyspace", e);
}
}
}
@Override
public synchronized void createSchema() {
final Properties props = ConfigurationConverter.getProperties(configuration.subset(String.format(CONFIG_PREFIX_FORMAT, schemaName.toLowerCase())));
try {
props.setProperty("name", props.getProperty("keyspace"));
LOG.info("Creating schema: " + schemaName + " " + props);
this.keyspace.createKeyspace(props);
} catch (ConnectionException e) {
LOG.error("Failed to create schema '{}' with properties '{}'", new Object[]{schemaName, props.toString(), e});
throw new RuntimeException("Failed to create keyspace " + keyspace.getKeyspaceName(), e);
}
}
@Override
public synchronized void dropSchema() {
try {
this.keyspace.dropKeyspace();
} catch (ConnectionException e) {
throw new RuntimeException("Failed to drop keyspace " + keyspace.getKeyspaceName(), e);
}
}
@Override
public synchronized Collection<Dao<?>> listDaos() {
return Lists.newArrayList(daos.values());
}
@Override
public boolean isExists() {
try {
this.keyspace.describeKeyspace();
return true;
}
catch (BadRequestException e) {
return false;
}
catch (Exception e) {
throw new RuntimeException("Failed to determine if keyspace " + keyspace.getKeyspaceName() + " exists", e);
}
}
@Override
public synchronized <T> Dao<T> getDao(Class<T> type) {
Dao<?> dao = daos.get(type);
if (dao == null) {
dao = new AstyanaxDao<T>(keyspace, type);
daos.put(type, dao);
}
return (Dao<T>) dao;
}
}
@Inject
public AstyanaxDaoSchemaProvider(KeyspaceClientProvider keyspaceProvider, AbstractConfiguration configuration) {
this.keyspaceProvider = keyspaceProvider;
this.configuration = configuration;
}
@PostConstruct
public void start() {
}
@PreDestroy
public void stop() {
}
@Override
public synchronized Collection<DaoSchema> listSchemas() {
return Lists.newArrayList(schemas.values());
}
@Override
public synchronized DaoSchema getSchema(String schemaName) throws NotFoundException {
AstyanaxDaoSchema schema = (AstyanaxDaoSchema)schemas.get(schemaName);
if (schema == null) {
LOG.info("Creating schema '{}'", new Object[]{schemaName});
Keyspace keyspace = keyspaceProvider.acquireKeyspace(schemaName);
schema = new AstyanaxDaoSchema(schemaName, keyspace);
schemas.put(schemaName, schema);
}
return schema;
}
}
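/*
 * Illustrative configuration sketch (only the CONFIG_PREFIX_FORMAT prefix, the "autocreate"
 * flag and the "keyspace" property are read by the code above; the values below are
 * placeholders): a schema named "configuration" would be driven by properties such as
 *
 *   com.netflix.paas.schema.configuration.autocreate=true
 *   com.netflix.paas.schema.configuration.keyspace=paas_configuration
 *
 * With autocreate enabled, the keyspace is created from these properties when the
 * AstyanaxDaoSchema is first constructed.
 */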
| 3,267 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/Indexer.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
import java.util.Collection;
import java.util.Map;
/**
* Very very very simple indexing API.
*
* @author elandau
*
*/
public interface Indexer {
/**
* Add the id to the tags
* @param id
* @param tags
*/
public void tagId(String id, Map<String, String> tags) throws IndexerException ;
/**
* Remove id from all its tags
* @param id
*/
public void removeId(String id) throws IndexerException ;
/**
* Get all tags for a document
* @param id
* @return
*/
public Map<String, String> getTags(String id) throws IndexerException;
/**
* Find all ids that have one or more of these tags
* @param tags
* @return
*/
public Collection<String> findUnion(Map<String, String> tags) throws IndexerException ;
/**
* Find all ids that have all of the tags
* @param tags
* @return
*/
public Collection<String> findIntersection(Map<String, String> tags) throws IndexerException ;
/**
* Find all ids that match the tag
* @param tag
* @return
*/
public Collection<String> find(String name, String value) throws IndexerException ;
/**
* Create the underlying storage
*/
public void createStorage() throws IndexerException ;
}
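/*
 * Illustrative usage sketch (the concrete SimpleReverseIndexer, column family name and tag
 * values are only examples): create the backing storage, tag a document id, then query by a
 * single field.
 *
 *   Indexer indexer = SimpleReverseIndexer.builder()
 *       .withKeyspace(keyspace)
 *       .withColumnFamily("documents")
 *       .build();
 *   indexer.createStorage();
 *   indexer.tagId("doc-1", ImmutableMap.of("owner", "elandau", "type", "report"));
 *   Collection<String> byOwner = indexer.find("owner", "elandau");
 */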
| 3,268 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/AstyanaxDao.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
import java.util.Collection;
import java.util.List;
import javax.persistence.PersistenceException;
import org.apache.commons.lang.StringUtils;
import com.google.common.base.CaseFormat;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.entitystore.DefaultEntityManager;
import com.netflix.astyanax.entitystore.EntityManager;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.recipes.reader.AllRowsReader;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.paas.dao.Dao;
/**
* Simple implementation of a Dao on top of the astyanax EntityManager API
* @author elandau
*
* @param <T>
*/
public class AstyanaxDao<T> implements Dao<T> {
private final static String DAO_NAME = "astyanax";
private final EntityManager<T, String> manager;
private final Keyspace keyspace;
private final ColumnFamily<String, String> columnFamily;
private final String entityName;
private final String prefix;
private static String entityNameFromClass(Class<?> entityType) {
return CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE,
StringUtils.removeEnd(StringUtils.substringAfterLast(entityType.getName(), "."), "Entity"));
}
public AstyanaxDao(Keyspace keyspace, Class<T> entityType) {
this.keyspace = keyspace;
this.entityName = entityNameFromClass(entityType);
this.columnFamily = new ColumnFamily<String, String>(this.entityName, StringSerializer.get(), StringSerializer.get());
this.prefix = "";
manager = new DefaultEntityManager.Builder<T, String>()
.withKeyspace(keyspace)
.withColumnFamily(columnFamily)
.withEntityType(entityType)
.build();
}
public AstyanaxDao(Keyspace keyspace, Class<T> entityType, String columnFamilyName) {
this.keyspace = keyspace;
this.entityName = entityNameFromClass(entityType);
this.columnFamily = new ColumnFamily<String, String>(columnFamilyName, StringSerializer.get(), StringSerializer.get());
this.prefix = this.entityName + ":";
manager = new DefaultEntityManager.Builder<T, String>()
.withKeyspace(keyspace)
.withColumnFamily(columnFamily)
.withEntityType(entityType)
.build();
}
@Override
public T read(String id) throws PersistenceException {
return this.manager.get(id);
}
@Override
public void write(T entity) throws PersistenceException {
this.manager.put(entity);
}
@Override
public Collection<T> list() throws PersistenceException{
return this.manager.getAll();
}
@Override
public void delete(String id) throws PersistenceException{
this.manager.delete(id);
}
@Override
public void createTable() throws PersistenceException {
try {
keyspace.createColumnFamily(columnFamily, null);
} catch (ConnectionException e) {
throw new PersistenceException("Failed to create column family : " + columnFamily.getName(), e);
}
}
@Override
public void deleteTable() throws PersistenceException{
try {
keyspace.dropColumnFamily(columnFamily);
} catch (ConnectionException e) {
throw new PersistenceException("Failed to drop column family : " + columnFamily.getName(), e);
}
}
@Override
public String getEntityType() {
return this.entityName;
}
@Override
public String getDaoType() {
return DAO_NAME;
}
@Override
public Boolean healthcheck() {
return isExists();
}
@Override
public Boolean isExists() {
try {
return keyspace.describeKeyspace().getColumnFamily(columnFamily.getName()) != null;
}
catch (Throwable t) {
return false;
}
}
@Override
public void shutdown() {
}
@Override
public Collection<String> listIds() throws PersistenceException {
final List<String> ids = Lists.newArrayList();
try {
new AllRowsReader.Builder<String, String>(keyspace, columnFamily)
.withIncludeEmptyRows(false)
.forEachRow(new Function<Row<String,String>, Boolean>() {
@Override
public Boolean apply(Row<String, String> row) {
ids.add(row.getKey());
return true;
}
})
.build()
.call();
} catch (Exception e) {
throw new PersistenceException("Error trying to fetch row ids", e);
}
return ids;
}
@Override
public Collection<T> read(Collection<String> keys) throws PersistenceException {
return this.manager.get(keys);
}
}
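/*
 * Illustrative usage sketch (the entity class, keyspace acquisition and row key are
 * assumptions drawn from how the DAO is used elsewhere in this codebase): the DAO is keyed by
 * String ids and persists the entity through the astyanax EntityManager.
 *
 *   Dao<CassandraClusterEntity> dao = new AstyanaxDao<CassandraClusterEntity>(keyspace, CassandraClusterEntity.class);
 *   if (!dao.isExists())
 *       dao.createTable();
 *   dao.write(entity);
 *   CassandraClusterEntity read = dao.read(entity.getClusterName());
 */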
| 3,269 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/IndexerException.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
public class IndexerException extends Exception {
public IndexerException(String message, Exception e) {
super(message, e);
}
public IndexerException(String message) {
super(message);
}
}
| 3,270 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/DaoKeys.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
import com.netflix.paas.SchemaNames;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.cassandra.entity.ColumnFamilyEntity;
import com.netflix.paas.cassandra.entity.KeyspaceEntity;
import com.netflix.paas.dao.DaoKey;
import com.netflix.paas.entity.ClusterEntity;
public class DaoKeys {
public final static DaoKey<KeyspaceEntity> DAO_KEYSPACE_ENTITY
= new DaoKey<KeyspaceEntity> (SchemaNames.CONFIGURATION.name(), KeyspaceEntity.class);
public final static DaoKey<CassandraClusterEntity> DAO_CASSANDRA_CLUSTER_ENTITY
= new DaoKey<CassandraClusterEntity>(SchemaNames.CONFIGURATION.name(), CassandraClusterEntity.class);
public final static DaoKey<ColumnFamilyEntity> DAO_COLUMN_FAMILY_ENTITY
= new DaoKey<ColumnFamilyEntity> (SchemaNames.CONFIGURATION.name(), ColumnFamilyEntity.class);
}
| 3,271 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/MetaDaoImpl.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
import com.google.inject.Inject;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.paas.json.JsonObject;
import com.netflix.paas.meta.dao.MetaDao;
import com.netflix.paas.meta.entity.Entity;
import com.netflix.paas.meta.entity.PaasTableEntity;
import com.netflix.paas.cassandra.provider.KeyspaceClientProvider;
public class MetaDaoImpl implements MetaDao{
KeyspaceClientProvider kscp;
public static ColumnFamily<String, String> dbcf = ColumnFamily
.newColumnFamily(
"db",
StringSerializer.get(),
StringSerializer.get());
@Inject
public MetaDaoImpl(KeyspaceClientProvider kscp) {
this.kscp = kscp;
}
@Override
public void writeMetaEntity(Entity entity) {
Keyspace ks = kscp.acquireKeyspace("meta");
OperationResult<Void> result;
MutationBatch m = ks.prepareMutationBatch();
m.withRow(dbcf, entity.getRowKey()).putColumn(entity.getName(), entity.getPayLoad(), null);
try {
result = m.execute();
if (entity instanceof PaasTableEntity) {
String schemaName = ((PaasTableEntity)entity).getSchemaName();
Keyspace schemaks = kscp.acquireKeyspace(schemaName);
ColumnFamily<String, String> cf = ColumnFamily.newColumnFamily(entity.getName(), StringSerializer.get(), StringSerializer.get());
schemaks.createColumnFamily(cf, null);
}
} catch (ConnectionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public Entity readMetaEntity(String rowKey) {
// TODO Auto-generated method stub
return null;
}
@Override
public void writeRow(String db, String table, JsonObject rowObj) {
// TODO Auto-generated method stub
}
@Override
public String listRow(String db, String table, String keycol, String key) {
// TODO Auto-generated method stub
return null;
}
}
| 3,272 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/dao/astyanax/SimpleReverseIndexer.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.dao.astyanax;
import java.util.Collection;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.annotations.Component;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.util.TimeUUIDUtils;
/**
* Very very very simple and inefficient tagger that stores a single row per tag.
* Use this when storing and tagging a relatively small number of 'documents'.
* The tagger works on equality and does not provide prefix or wildcard searches
*
* RowKey: <TagName>
* Column: <ForeignKey><VersionUUID>
*
* @author elandau
*
*/
public class SimpleReverseIndexer implements Indexer {
private final static String ID_PREFIX = "$";
/**
* Composite entry within the index CF
* @author elandau
*
*/
public static class IndexEntry {
public IndexEntry() {
}
public IndexEntry(String id, UUID uuid) {
this.id = id;
this.version = uuid;
}
@Component(ordinal = 0)
public String id;
@Component(ordinal = 1)
public UUID version;
}
private static final AnnotatedCompositeSerializer<IndexEntry> EntrySerializer = new AnnotatedCompositeSerializer<IndexEntry>(IndexEntry.class);
/**
* Builder pattern
* @author elandau
*/
public static class Builder {
private Keyspace keyspace;
private String columnFamily;
public Builder withKeyspace(Keyspace keyspace) {
this.keyspace = keyspace;
return this;
}
public Builder withColumnFamily(String columnFamily) {
this.columnFamily = columnFamily;
return this;
}
public SimpleReverseIndexer build() {
return new SimpleReverseIndexer(this);
}
}
public static Builder builder() {
return new Builder();
}
private Keyspace keyspace;
private ColumnFamily<String, IndexEntry> indexCf;
private ColumnFamily<String, String> dataCf;
private SimpleReverseIndexer(Builder builder) {
indexCf = new ColumnFamily<String, IndexEntry>(builder.columnFamily + "_idx", StringSerializer.get(), EntrySerializer);
dataCf = new ColumnFamily<String, String> (builder.columnFamily + "_data", StringSerializer.get(), StringSerializer.get());
keyspace = builder.keyspace;
}
@Override
public Collection<String> findUnion(Map<String, String> tags) throws IndexerException {
Set<String> ids = Sets.newHashSet();
MutationBatch mb = keyspace.prepareMutationBatch();
try {
for (Row<String, IndexEntry> row : keyspace.prepareQuery(indexCf).getKeySlice(fieldsToSet(tags)).execute().getResult()) {
ColumnListMutation<IndexEntry> mrow = null;
IndexEntry previousEntry = null;
for (Column<IndexEntry> column : row.getColumns()) {
IndexEntry entry = column.getName();
// Compare ids by value and remember the previous entry so that stale versions of
// the same id are cleaned up as a side effect of the read.
if (previousEntry != null && entry.id.equals(previousEntry.id)) {
if (mrow == null)
mrow = mb.withRow(indexCf, row.getKey());
mrow.deleteColumn(previousEntry);
}
ids.add(entry.id);
previousEntry = entry;
}
}
} catch (ConnectionException e) {
throw new IndexerException("Failed to get tags : " + tags, e);
} finally {
try {
mb.execute();
}
catch (Exception e) {
// OK to ignore
}
}
return ids;
}
private Collection<String> fieldsToSet(Map<String, String> tags) {
return Collections2.transform(tags.entrySet(), new Function<Entry<String, String>, String>() {
public String apply(Entry<String, String> entry) {
return entry.getKey() + "=" + entry.getValue();
}
});
}
@Override
public Collection<String> findIntersection(Map<String, String> tags) throws IndexerException {
Set<String> ids = Sets.newHashSet();
MutationBatch mb = keyspace.prepareMutationBatch();
try {
boolean first = true;
for (Row<String, IndexEntry> row : keyspace.prepareQuery(indexCf).getKeySlice(fieldsToSet(tags)).execute().getResult()) {
Set<String> rowIds = Sets.newHashSet();
ColumnListMutation<IndexEntry> mrow = null;
IndexEntry previousEntry = null;
for (Column<IndexEntry> column : row.getColumns()) {
IndexEntry entry = column.getName();
// Compare ids by value and track the previous entry so that stale versions of
// the same id are cleaned up as part of the read.
if (previousEntry != null && entry.id.equals(previousEntry.id)) {
if (mrow == null)
mrow = mb.withRow(indexCf, row.getKey());
mrow.deleteColumn(previousEntry);
}
rowIds.add(entry.id);
previousEntry = entry;
}
if (first) {
first = false;
ids = rowIds;
}
else {
ids = Sets.intersection(ids, rowIds);
if (ids.isEmpty())
return ids;
}
}
} catch (ConnectionException e) {
throw new IndexerException("Failed to get tags : " + tags, e);
} finally {
try {
mb.execute();
}
catch (ConnectionException e) {
// OK to ignore
}
}
return ids;
}
@Override
public Collection<String> find(String field, String value) throws IndexerException {
Set<String> ids = Sets.newHashSet();
String indexRowKey = field + "=" + value;
MutationBatch mb = keyspace.prepareMutationBatch();
try {
ColumnList<IndexEntry> row = keyspace.prepareQuery(indexCf).getRow(indexRowKey).execute().getResult();
IndexEntry previousEntry = null;
ColumnListMutation<IndexEntry> mrow = null;
for (Column<IndexEntry> column : row) {
IndexEntry entry = column.getName();
// Compare ids by value and track the previous entry so that stale versions of
// the same id are cleaned up while only distinct ids are returned.
if (previousEntry != null && entry.id.equals(previousEntry.id)) {
if (mrow == null)
mrow = mb.withRow(indexCf, indexRowKey);
mrow.deleteColumn(previousEntry);
}
else {
ids.add(entry.id);
}
previousEntry = entry;
}
} catch (ConnectionException e) {
throw new IndexerException("Failed to get tag : " + indexRowKey, e);
} finally {
try {
mb.execute();
}
catch (ConnectionException e) {
// OK to ignore
}
}
return ids;
}
@Override
public void tagId(String id, Map<String, String> tags) throws IndexerException {
MutationBatch mb = keyspace.prepareMutationBatch();
ColumnListMutation<String> idRow = mb.withRow(dataCf, id);
UUID uuid = TimeUUIDUtils.getUniqueTimeUUIDinMicros();
for (Map.Entry<String, String> tag : tags.entrySet()) {
String rowkey = tag.getKey() + "=" + tag.getValue();
System.out.println("Rowkey: " + rowkey);
mb.withRow(indexCf, tag.getKey() + "=" + tag.getValue())
.putEmptyColumn(new IndexEntry(id, uuid));
// idRow.putColumn(tag.getKey(), tag.getValue());
}
try {
mb.execute();
} catch (ConnectionException e) {
throw new IndexerException("Failed to store tags : " + tags + " for id " + id, e);
}
}
@Override
public void removeId(String id) throws IndexerException {
// TODO Auto-generated method stub
}
@Override
public void createStorage() throws IndexerException {
try {
keyspace.createColumnFamily(indexCf, ImmutableMap.<String, Object>builder()
.put("comparator_type", "CompositeType(UTF8Type, TimeUUIDType)")
.build());
} catch (ConnectionException e) {
e.printStackTrace();
}
try {
keyspace.createColumnFamily(dataCf, ImmutableMap.<String, Object>builder()
.put("default_validation_class", "LongType")
.put("key_validation_class", "UTF8Type")
.put("comparator_type", "UTF8Type")
.build());
} catch (ConnectionException e) {
e.printStackTrace();
}
}
@Override
public Map<String, String> getTags(String id) throws IndexerException {
try {
ColumnList<String> fields = keyspace.prepareQuery(dataCf).getRow(id).execute().getResult();
Map<String, String> mapped = Maps.newHashMap();
for (Column<String> column : fields) {
mapped.put(column.getName(), column.getStringValue());
}
return mapped;
} catch (ConnectionException e) {
throw new IndexerException("Failed to get tags for id " + id, e);
}
}
}
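/*
 * Illustrative sketch of the query semantics (tag values are placeholders): each tag is
 * stored under an index row keyed "field=value", so a lookup is a key slice over those rows.
 *
 *   indexer.tagId("doc-1", ImmutableMap.of("owner", "elandau", "type", "report"));
 *   indexer.tagId("doc-2", ImmutableMap.of("owner", "elandau", "type", "memo"));
 *   // union: every id carrying any of the tags -> [doc-1, doc-2]
 *   indexer.findUnion(ImmutableMap.of("owner", "elandau", "type", "report"));
 *   // intersection: only ids carrying all of the tags -> [doc-1]
 *   indexer.findIntersection(ImmutableMap.of("owner", "elandau", "type", "report"));
 */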
| 3,273 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/PaasCassandraBootstrap.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.netflix.paas.PaasBootstrap;
import com.netflix.paas.SchemaNames;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoProvider;
import com.netflix.paas.dao.DaoSchema;
public class PaasCassandraBootstrap {
private static final Logger LOG = LoggerFactory.getLogger(PaasBootstrap.class);
@Inject
public PaasCassandraBootstrap(DaoProvider daoProvider) throws Exception {
LOG.info("Bootstrap PaasAstyanax");
DaoSchema schemaDao = daoProvider.getSchema(SchemaNames.CONFIGURATION.name());
if (!schemaDao.isExists()) {
schemaDao.createSchema();
}
Dao<CassandraClusterEntity> clusterDao = daoProvider.getDao(SchemaNames.CONFIGURATION.name(), CassandraClusterEntity.class);
if (!clusterDao.isExists()) {
clusterDao.createTable();
}
}
}
| 3,274 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/CassandraPaasModule.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Scopes;
import com.google.inject.assistedinject.FactoryModuleBuilder;
import com.google.inject.multibindings.MapBinder;
import com.netflix.paas.cassandra.admin.CassandraClusterAdminResource;
import com.netflix.paas.cassandra.admin.CassandraClusterAdminResourceFactory;
import com.netflix.paas.cassandra.admin.CassandraSystemAdminResource;
import com.netflix.paas.cassandra.discovery.ClusterDiscoveryService;
import com.netflix.paas.cassandra.discovery.LocalClusterDiscoveryService;
import com.netflix.paas.cassandra.provider.AstyanaxConfigurationProvider;
import com.netflix.paas.cassandra.provider.AstyanaxConnectionPoolConfigurationProvider;
import com.netflix.paas.cassandra.provider.AstyanaxConnectionPoolMonitorProvider;
import com.netflix.paas.cassandra.provider.CassandraTableResourceFactory;
import com.netflix.paas.cassandra.provider.ClusterClientProvider;
import com.netflix.paas.cassandra.provider.HostSupplierProvider;
import com.netflix.paas.cassandra.provider.KeyspaceClientProvider;
import com.netflix.paas.cassandra.provider.impl.DefaultAstyanaxConfigurationProvider;
import com.netflix.paas.cassandra.provider.impl.DefaultAstyanaxConnectionPoolConfigurationProvider;
import com.netflix.paas.cassandra.provider.impl.DefaultAstyanaxConnectionPoolMonitorProvider;
import com.netflix.paas.cassandra.provider.impl.DefaultAstyanaxClusterClientProvider;
import com.netflix.paas.cassandra.provider.impl.DefaultKeyspaceClientProvider;
import com.netflix.paas.cassandra.provider.impl.LocalHostSupplierProvider;
import com.netflix.paas.cassandra.resources.admin.AstyanaxThriftClusterAdminResource;
import com.netflix.paas.cassandra.tasks.ClusterDiscoveryTask;
import com.netflix.paas.cassandra.tasks.ClusterRefreshTask;
import com.netflix.paas.dao.DaoSchemaProvider;
import com.netflix.paas.dao.astyanax.AstyanaxDaoSchemaProvider;
import com.netflix.paas.provider.TableDataResourceFactory;
import com.netflix.paas.resources.impl.JerseySchemaDataResourceImpl;
public class CassandraPaasModule extends AbstractModule {
private static final Logger LOG = LoggerFactory.getLogger(CassandraPaasModule.class);
@Override
protected void configure() {
LOG.info("Loading CassandraPaasModule");
// There will be a different TableResourceProvider for each persistence technology
MapBinder<String, TableDataResourceFactory> tableResourceProviders = MapBinder.newMapBinder(binder(), String.class, TableDataResourceFactory.class);
tableResourceProviders.addBinding("cassandra").to(CassandraTableResourceFactory.class).in(Scopes.SINGLETON);
// Binding to enable DAOs using astyanax
MapBinder<String, DaoSchemaProvider> daoManagers = MapBinder.newMapBinder(binder(), String.class, DaoSchemaProvider.class);
daoManagers.addBinding("astyanax").to(AstyanaxDaoSchemaProvider.class).in(Scopes.SINGLETON);
bind(AstyanaxConfigurationProvider.class) .to(DefaultAstyanaxConfigurationProvider.class).in(Scopes.SINGLETON);
bind(AstyanaxConnectionPoolConfigurationProvider.class).to(DefaultAstyanaxConnectionPoolConfigurationProvider.class).in(Scopes.SINGLETON);
bind(AstyanaxConnectionPoolMonitorProvider.class) .to(DefaultAstyanaxConnectionPoolMonitorProvider.class).in(Scopes.SINGLETON);
bind(KeyspaceClientProvider.class) .to(DefaultKeyspaceClientProvider.class).in(Scopes.SINGLETON);
bind(ClusterClientProvider.class) .to(DefaultAstyanaxClusterClientProvider.class).in(Scopes.SINGLETON);
install(new FactoryModuleBuilder()
.implement(CassandraClusterAdminResource.class, AstyanaxThriftClusterAdminResource.class)
.build(CassandraClusterAdminResourceFactory.class));
// REST resources
bind(ClusterDiscoveryService.class).to(LocalClusterDiscoveryService.class);
bind(CassandraSystemAdminResource.class).in(Scopes.SINGLETON);
bind(JerseySchemaDataResourceImpl.class).in(Scopes.SINGLETON);
MapBinder<String, HostSupplierProvider> hostSuppliers = MapBinder.newMapBinder(binder(), String.class, HostSupplierProvider.class);
hostSuppliers.addBinding("local").to(LocalHostSupplierProvider.class).in(Scopes.SINGLETON);
// Tasks
bind(ClusterDiscoveryTask.class);
bind(ClusterRefreshTask.class);
}
}
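/*
 * Illustrative bootstrap sketch (the companion module and what it binds are assumptions; only
 * CassandraPaasModule is defined here): the module is meant to be installed alongside the core
 * PaaS bindings when building the Guice injector.
 *
 *   Injector injector = Guice.createInjector(
 *       new PaasModule(),            // assumed core module providing DaoProvider, TaskManager, ...
 *       new CassandraPaasModule());
 */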
| 3,275 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/MetaModule.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.Cluster;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.name.Named;
import com.netflix.paas.PaasModule;
import com.netflix.paas.dao.astyanax.MetaDaoImpl;
import com.netflix.paas.dao.meta.CqlMetaDaoImpl;
import com.netflix.paas.meta.dao.MetaDao;
public class MetaModule extends AbstractModule{
private static final Logger LOG = LoggerFactory.getLogger(MetaModule.class);
@Override
protected void configure() {
// TODO Auto-generated method stub
// bind(MetaDao.class).to(MetaDaoImpl.class).asEagerSingleton();
bind(MetaDao.class).to(CqlMetaDaoImpl.class).asEagerSingleton();
}
@Provides
Cluster provideCluster(@Named("clustername") String clustername) {
//String nodes = eureka.getNodes(clustername);
//get nodes in the cluster, to pass as parameters to the underlying apis
Cluster cluster = Cluster.builder().addContactPoint("localhost").build();
return cluster;
}
}
| 3,276 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/MetaCassandraBootstrap.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.netflix.paas.PaasBootstrap;
import com.netflix.paas.SchemaNames;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoProvider;
import com.netflix.paas.dao.DaoSchema;
public class MetaCassandraBootstrap {
private static final Logger LOG = LoggerFactory.getLogger(PaasBootstrap.class);
@Inject
public MetaCassandraBootstrap(DaoProvider daoProvider) throws Exception {
LOG.info("Bootstrap Meta Cassandra");
DaoSchema schemaDao = daoProvider.getSchema(SchemaNames.META.name());
if (!schemaDao.isExists()) {
schemaDao.createSchema();
}
Dao<CassandraClusterEntity> clusterDao = daoProvider.getDao(SchemaNames.META.name(), CassandraClusterEntity.class);
if (!clusterDao.isExists()) {
clusterDao.createTable();
}
}
}
| 3,277 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/tasks/ClearSchemasTask.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.tasks;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.paas.tasks.Task;
import com.netflix.paas.tasks.TaskContext;
public class ClearSchemasTask implements Task {
private static final Logger LOG = LoggerFactory.getLogger(ClearSchemasTask.class);
@Override
public void execte(TaskContext context) throws Exception {
}
}
| 3,278 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/tasks/ClusterRefreshTask.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.tasks;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;
import com.google.inject.Inject;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.ddl.ColumnDefinition;
import com.netflix.astyanax.ddl.ColumnFamilyDefinition;
import com.netflix.astyanax.ddl.FieldMetadata;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.paas.JsonSerializer;
import com.netflix.paas.SchemaNames;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.cassandra.entity.MapStringToObject;
import com.netflix.paas.cassandra.keys.ClusterKey;
import com.netflix.paas.cassandra.provider.ClusterClientProvider;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoProvider;
import com.netflix.paas.tasks.Task;
import com.netflix.paas.tasks.TaskContext;
/**
* Refresh the information for a cluster
*
* @author elandau
*
*/
public class ClusterRefreshTask implements Task {
private static Logger LOG = LoggerFactory.getLogger(ClusterRefreshTask.class);
private final ClusterClientProvider provider;
private final Dao<CassandraClusterEntity> clusterDao;
@Inject
public ClusterRefreshTask(ClusterClientProvider provider, DaoProvider daoProvider) throws Exception {
this.provider = provider;
this.clusterDao = daoProvider.getDao(SchemaNames.CONFIGURATION.name(), CassandraClusterEntity.class);
}
@Override
public void execte(TaskContext context) throws Exception{
// Get parameters from the context
String clusterName = context.getStringParameter("cluster");
Boolean ignoreSystem = context.getBooleanParameter("ignoreSystem", true);
CassandraClusterEntity entity = (CassandraClusterEntity)context.getParamater("entity");
LOG.info("Refreshing cluster " + clusterName);
// Read the current state from the DAO
// CassandraClusterEntity entity = clusterDao.read(clusterName);
Map<String, String> existingKeyspaces = entity.getKeyspaces();
if (existingKeyspaces == null) {
existingKeyspaces = Maps.newHashMap();
entity.setKeyspaces(existingKeyspaces);
}
Map<String, String> existingColumnFamilies = entity.getColumnFamilies();
if (existingColumnFamilies == null) {
existingColumnFamilies = Maps.newHashMap();
entity.setColumnFamilies(existingColumnFamilies);
}
Set<String> foundKeyspaces = Sets.newHashSet();
Set<String> foundColumnFamilies = Sets.newHashSet();
Cluster cluster = provider.acquireCluster(new ClusterKey(entity.getClusterName(), entity.getDiscoveryType()));
boolean changed = false;
// // Iterate found keyspaces
try {
for (KeyspaceDefinition keyspace : cluster.describeKeyspaces()) {
// Extract data from the KeyspaceDefinition
String ksName = keyspace.getName();
MapStringToObject keyspaceOptions = getKeyspaceOptions(keyspace);
if (existingKeyspaces.containsKey(ksName)) {
MapStringToObject previousOptions = JsonSerializer.fromString(existingKeyspaces.get(ksName), MapStringToObject.class);
MapDifference keyspaceDiff = Maps.difference(keyspaceOptions, previousOptions);
if (keyspaceDiff.areEqual()) {
LOG.info("Keyspace '{}' didn't change", new Object[]{ksName});
}
else {
changed = true;
LOG.info("CF Changed: " + keyspaceDiff.entriesDiffering());
}
}
else {
changed = true;
}
String strKeyspaceOptions = JsonSerializer.toString(keyspaceOptions);
// // Keep track of keyspace
foundKeyspaces.add(keyspace.getName());
existingKeyspaces.put(ksName, strKeyspaceOptions);
LOG.info("Found keyspace '{}|{}' : {}", new Object[]{entity.getClusterName(), ksName, keyspaceOptions});
// // Iterate found column families
for (ColumnFamilyDefinition cf : keyspace.getColumnFamilyList()) {
// Extract data from the ColumnFamilyDefinition
String cfName = String.format("%s|%s", keyspace.getName(), cf.getName());
MapStringToObject cfOptions = getColumnFamilyOptions(cf);
String strCfOptions = JsonSerializer.toString(cfOptions);
//
// // Check for changes
if (existingColumnFamilies.containsKey(cfName)) {
MapStringToObject previousOptions = JsonSerializer.fromString(existingColumnFamilies.get(cfName), MapStringToObject.class);
LOG.info("Old options: " + previousOptions);
MapDifference cfDiff = Maps.difference(cfOptions, previousOptions);
if (cfDiff.areEqual()) {
LOG.info("CF '{}' didn't change", new Object[]{cfName});
}
else {
changed = true;
LOG.info("CF Changed: " + cfDiff.entriesDiffering());
}
}
else {
changed = true;
}
//
// // Keep track of the cf
foundColumnFamilies.add(cfName);
existingColumnFamilies.put(cfName, strCfOptions);
LOG.info("Found column family '{}|{}|{}' : {}", new Object[]{entity.getClusterName(), keyspace.getName(), cf.getName(), strCfOptions});
}
}
}
catch (Exception e) {
LOG.info("Error refreshing cluster: " + entity.getClusterName(), e);
entity.setEnabled(false);
}
SetView<String> ksRemoved = Sets.difference(existingKeyspaces.keySet(), foundKeyspaces);
LOG.info("Keyspaces removed: " + ksRemoved);
SetView<String> cfRemoved = Sets.difference(existingColumnFamilies.keySet(), foundColumnFamilies);
LOG.info("CF removed: " + cfRemoved);
clusterDao.write(entity);
}
private MapStringToObject getKeyspaceOptions(KeyspaceDefinition keyspace) {
MapStringToObject result = new MapStringToObject();
for (FieldMetadata field : keyspace.getFieldsMetadata()) {
result.put(field.getName(), keyspace.getFieldValue(field.getName()));
}
result.remove("CF_DEFS");
return result;
}
private MapStringToObject getColumnFamilyOptions(ColumnFamilyDefinition cf) {
MapStringToObject result = new MapStringToObject();
for (FieldMetadata field : cf.getFieldsMetadata()) {
if (field.getName().equals("COLUMN_METADATA")) {
// // This will get handled below
}
else {
Object value = cf.getFieldValue(field.getName());
if (value instanceof ByteBuffer) {
result.put(field.getName(), ((ByteBuffer)value).array());
}
else {
result.put(field.getName(), value);
}
}
}
// // Hack to get the column metadata
List<MapStringToObject> columns = Lists.newArrayList();
for (ColumnDefinition column : cf.getColumnDefinitionList()) {
MapStringToObject map = new MapStringToObject();
for (FieldMetadata field : column.getFieldsMetadata()) {
Object value = column.getFieldValue(field.getName());
if (value instanceof ByteBuffer) {
map.put(field.getName(), ((ByteBuffer)value).array()); // column-level values belong in the per-column map
}
else {
map.put(field.getName(), value);
}
}
columns.add(map);
}
result.put("COLUMN_METADATA", columns);
return result;
}
}
| 3,279 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/tasks/ClusterToVirtualSchemaTask.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.tasks;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.paas.tasks.Task;
import com.netflix.paas.tasks.TaskContext;
/**
* Task to load a cluster's schema into a virtual schema
* @author elandau
*
*/
public class ClusterToVirtualSchemaTask implements Task {
private static final Logger LOG = LoggerFactory.getLogger(ClusterToVirtualSchemaTask.class);
@Override
public void execte(TaskContext context) throws Exception {
String clusterName = context.getStringParameter("cluster");
}
}
| 3,280 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/tasks/ClusterDiscoveryTask.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.tasks;
import java.util.Collection;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.netflix.paas.SchemaNames;
import com.netflix.paas.cassandra.discovery.ClusterDiscoveryService;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoProvider;
import com.netflix.paas.exceptions.NotFoundException;
import com.netflix.paas.tasks.Task;
import com.netflix.paas.tasks.TaskContext;
import com.netflix.paas.tasks.TaskManager;
/**
* Task to compare the list of clusters in the Dao and the list of clusters from the discovery
* service and add/remove/update in response to any changes.
*
* @author elandau
*
*/
public class ClusterDiscoveryTask implements Task {
private static final Logger LOG = LoggerFactory.getLogger(ClusterDiscoveryTask.class);
private final ClusterDiscoveryService discoveryService;
private final Dao<CassandraClusterEntity> clusterDao;
private final TaskManager taskManager;
@Inject
public ClusterDiscoveryTask(
ClusterDiscoveryService discoveryService,
DaoProvider daoProvider,
TaskManager taskManager) throws NotFoundException{
this.discoveryService = discoveryService;
this.clusterDao = daoProvider.getDao(SchemaNames.CONFIGURATION.name(), CassandraClusterEntity.class);
this.taskManager = taskManager;
}
@Override
public void execte(TaskContext context) throws Exception {
// Get complete set of existing clusters from the discovery service
Collection<String> clusters = Sets.newHashSet(discoveryService.getClusterNames());
LOG.info(clusters.toString());
// Load entire list of previously known clusters to a map of <ClusterName> => <ClusterEntity>
Map<String, CassandraClusterEntity> existingClusters = Maps.uniqueIndex(
this.clusterDao.list(),
new Function<CassandraClusterEntity, String>() {
@Override
public String apply(CassandraClusterEntity cluster) {
LOG.info("Found existing cluster : " + cluster.getClusterName());
return cluster.getClusterName();
}
});
// Iterate through new list of clusters and look for changes
for (String clusterName : clusters) {
CassandraClusterEntity entity = existingClusters.get(clusterName);
// This is a new cluster
if (entity == null) {
LOG.info("Found new cluster : " + clusterName);
entity = CassandraClusterEntity.builder()
.withName(clusterName)
.withIsEnabled(true)
.withDiscoveryType(discoveryService.getName())
.build();
try {
clusterDao.write(entity);
}
catch (Exception e) {
LOG.warn("Failed to persist cluster info for '{}'", new Object[]{clusterName, e});
}
updateCluster(entity);
}
// We knew about it before and it is disabled
else if (!entity.isEnabled()) {
LOG.info("Cluster '{}' is disabled and will not be refreshed", new Object[]{clusterName});
}
// Refresh the info for an existing cluster
else {
LOG.info("Cluster '{}' is being refreshed", new Object[]{clusterName});
if (entity.getDiscoveryType() == null) {
entity.setDiscoveryType(discoveryService.getName());
try {
clusterDao.write(entity);
}
catch (Exception e) {
LOG.warn("Failed to persist cluster info for '{}'", new Object[]{clusterName, e});
}
}
updateCluster(entity);
}
}
}
private void updateCluster(CassandraClusterEntity entity) {
LOG.info("Need to update cluster " + entity.getClusterName());
try {
taskManager.submit(ClusterRefreshTask.class, ImmutableMap.<String, Object>builder()
.put("entity", entity)
.build());
} catch (Exception e) {
LOG.warn("Failed to create ClusterRefreshTask for " + entity.getClusterName(), e);
}
}
}
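A minimal sketch of how this task is typically kicked off: CassandraSystemAdminResource (further down in this dump) fires it through the TaskManager, and the small wrapper class below is an assumed helper around that same call, reusing the Inject and TaskManager imports seen in the file above.

// Assumed helper; only taskManager.submit(ClusterDiscoveryTask.class) is taken from this code base.
public class ClusterDiscoveryScheduler {

    private final TaskManager taskManager;

    @Inject
    public ClusterDiscoveryScheduler(TaskManager taskManager) {
        this.taskManager = taskManager;
    }

    public void triggerDiscovery() {
        // Fire-and-forget: the task reconciles the discovery service's cluster list
        // against the clusters already persisted in the Dao.
        taskManager.submit(ClusterDiscoveryTask.class);
    }
}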
| 3,281 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/entity/MapStringToObject.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.entity;
import java.util.HashMap;
public class MapStringToObject extends HashMap<String, Object> {
}
| 3,282 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/entity/CassandraClusterEntity.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.entity;
import java.util.Collection;
import java.util.Map;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import org.apache.commons.lang.StringUtils;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
@Entity
public class CassandraClusterEntity {
public static class Builder {
private final CassandraClusterEntity entity = new CassandraClusterEntity();
public Builder withName(String name) {
entity.clusterName = name;
return this;
}
public Builder withIsEnabled(Boolean enabled) {
entity.enabled = enabled;
return this;
}
public Builder withDiscoveryType(String discoveryType) {
entity.discoveryType = discoveryType;
return this;
}
public CassandraClusterEntity build() {
Preconditions.checkNotNull(entity.clusterName);
return this.entity;
}
}
public static Builder builder() {
return new Builder();
}
@Id
private String clusterName;
@Column
private Map<String, String> keyspaces;
@Column
private Map<String, String> columnFamilies;
@Column(name="enabled")
private boolean enabled = true;
@Column(name="discovery")
private String discoveryType;
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public Map<String, String> getKeyspaces() {
return keyspaces;
}
public void setKeyspaces(Map<String, String> keyspaces) {
this.keyspaces = keyspaces;
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public Collection<String> getColumnFamilyNames() {
if (this.columnFamilies == null)
return Lists.newArrayList();
return columnFamilies.keySet();
}
public Collection<String> getKeyspaceNames() {
if (this.keyspaces == null)
return Lists.newArrayList();
return keyspaces.keySet();
}
public Collection<String> getKeyspaceColumnFamilyNames(final String keyspaceName) {
return Collections2.filter(this.columnFamilies.keySet(), new Predicate<String>() {
@Override
public boolean apply(String cfName) {
return StringUtils.startsWith(cfName, keyspaceName + "|");
}
});
}
public Map<String, String> getColumnFamilies() {
return columnFamilies;
}
public void setColumnFamilies(Map<String, String> columnFamilies) {
this.columnFamilies = columnFamilies;
}
public String getDiscoveryType() {
return discoveryType;
}
public void setDiscoveryType(String discoveryType) {
this.discoveryType = discoveryType;
}
}
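A brief sketch of the builder above in use; the cluster name and discovery type are illustrative values only.

// Illustrative values; the builder only enforces that a cluster name is present.
CassandraClusterEntity entity = CassandraClusterEntity.builder()
        .withName("test_cluster")
        .withIsEnabled(true)
        .withDiscoveryType("localhost")
        .build();
// Keyspace and column family maps can be filled in later via the setters.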
| 3,283 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/entity/ColumnFamilyEntity.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.entity;
import java.util.Map;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist;
import com.google.common.base.Preconditions;
@Entity
public class ColumnFamilyEntity {
public static class Builder {
private final ColumnFamilyEntity entity = new ColumnFamilyEntity();
public Builder withName(String name) {
entity.name = name;
return this;
}
public Builder withKeyspace(String name) {
entity.keyspaceName = name;
return this;
}
public Builder withCluster(String name) {
entity.clusterName = name;
return this;
}
public Builder withOptions(Map<String, String> options) {
entity.setOptions(options);
return this;
}
public ColumnFamilyEntity build() {
Preconditions.checkNotNull(entity.name);
return this.entity;
}
}
public static Builder builder() {
return new Builder();
}
@PrePersist
private void prePersist() {
this.id = String.format("%s.%s.%s", clusterName, keyspaceName, name);
}
@PostLoad
private void postLoad() {
this.id = String.format("%s.%s.%s", clusterName, keyspaceName, name);
}
@Id
private String id;
@Column(name="name")
private String name;
@Column(name="keyspace")
private String keyspaceName;
@Column(name="cluster")
private String clusterName;
/**
* Low level Cassandra column family configuration parameters
*/
@Column(name="options")
private Map<String, String> options;
public String getKeyspaceName() {
return keyspaceName;
}
public void setKeyspaceName(String keyspaceName) {
this.keyspaceName = keyspaceName;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Map<String, String> getOptions() {
return options;
}
public void setOptions(Map<String, String> options) {
this.options = options;
}
}
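The builder here is used the same way; the names below are illustrative and the option map is assumed to be a Guava ImmutableMap.

ColumnFamilyEntity cf = ColumnFamilyEntity.builder()
        .withCluster("test_cluster")
        .withKeyspace("ks1")
        .withName("users")
        .withOptions(ImmutableMap.of("comparator_type", "UTF8Type"))   // option key is illustrative
        .build();
// The composite id ("cluster.keyspace.name") is derived automatically in prePersist()/postLoad().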
| 3,284 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/entity/KeyspaceEntity.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.entity;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist;
import javax.persistence.Transient;
import com.google.common.base.Preconditions;
import com.netflix.paas.exceptions.NotFoundException;
@Entity
public class KeyspaceEntity {
public static class Builder {
private final KeyspaceEntity entity = new KeyspaceEntity();
public Builder withName(String name) {
entity.name = name;
return this;
}
public Builder addColumnFamily(String columnFamilyName) {
if (entity.getColumnFamilies() == null) {
entity.setColumnFamilies(new HashSet<String>());
}
entity.getColumnFamilies().add(columnFamilyName);
return this;
}
public Builder withOptions(Map<String, String> options) {
entity.setOptions(options);
return this;
}
public KeyspaceEntity build() {
return this.entity;
}
}
public static Builder builder() {
return new Builder();
}
@Id
private String id;
@Column(name="name")
private String name;
@Column(name="cluster")
private String clusterName;
@Column(name="options")
private Map<String, String> options;
@Column(name="cfs")
private Set<String> columnFamilies;
@Transient
private Map<String, ColumnFamilyEntity> columnFamilyEntities;
@PrePersist
private void prePersist() {
this.id = String.format("%s.%s", clusterName, name);
}
@PostLoad
private void postLoad() {
this.id = String.format("%s.%s", clusterName, name);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Map<String, ColumnFamilyEntity> getColumnFamilyEntities() {
return columnFamilyEntities;
}
public void setColumnFamilyEntities(Map<String, ColumnFamilyEntity> columnFamilyEntities) {
this.columnFamilyEntities = columnFamilyEntities;
}
public Map<String, String> getOptions() {
return options;
}
public void setOptions(Map<String, String> options) {
this.options = options;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public Set<String> getColumnFamilies() {
return columnFamilies;
}
public void setColumnFamilies(Set<String> columnFamilies) {
this.columnFamilies = columnFamilies;
}
public ColumnFamilyEntity getColumnFamily(String columnFamilyName) throws NotFoundException {
ColumnFamilyEntity entity = columnFamilyEntities.get(columnFamilyName);
if (entity == null)
throw new NotFoundException("columnfamily", columnFamilyName);
return entity;
}
}
| 3,285 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/resources/CassandraKeyspaceHolder.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.resources;
import java.util.concurrent.ConcurrentMap;
import com.google.common.collect.Maps;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.paas.exceptions.AlreadyExistsException;
import com.netflix.paas.exceptions.NotFoundException;
import com.netflix.paas.resources.TableDataResource;
/**
 * Tracks a keyspace and the column families on it that are accessible to this instance
*
* @author elandau
*
*/
public class CassandraKeyspaceHolder {
private final AstyanaxContext<Keyspace> context;
private final ConcurrentMap<String, TableDataResource> columnFamilies = Maps.newConcurrentMap();
public CassandraKeyspaceHolder(AstyanaxContext<Keyspace> context) {
this.context = context;
}
/**
     * Register a column family on this keyspace and create the appropriate TableDataResource
     * to read from and write to it.
*
* @param columnFamily
* @throws AlreadyExistsException
*/
public synchronized void registerColumnFamily(String columnFamily) throws AlreadyExistsException {
if (columnFamilies.containsKey(columnFamily))
throw new AlreadyExistsException("columnfamily", columnFamily);
columnFamilies.put(columnFamily, new AstyanaxThriftDataTableResource(context.getClient(), columnFamily));
}
/**
* Unregister a column family so that it is no longer available
* @param columnFamily
* @throws NotFoundException
*/
public synchronized void unregisterColumnFamily(String columnFamily) throws NotFoundException {
columnFamilies.remove(columnFamily);
}
/**
     * Retrieve a registered column family resource
*
* @param columnFamily
* @return
* @throws NotFoundException
*/
public TableDataResource getColumnFamilyDataResource(String columnFamily) throws NotFoundException {
TableDataResource resource = columnFamilies.get(columnFamily);
if (resource == null)
throw new NotFoundException("columnfamily", columnFamily);
return resource;
}
public void shutdown() {
this.context.shutdown();
}
public void initialize() {
this.context.start();
}
}
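A usage sketch, assuming 'context' is a fully configured AstyanaxContext<Keyspace> for the target keyspace and the column family name is illustrative.

CassandraKeyspaceHolder holder = new CassandraKeyspaceHolder(context);
holder.initialize();                                     // starts the underlying AstyanaxContext

holder.registerColumnFamily("users");                    // throws AlreadyExistsException if already registered
TableDataResource users = holder.getColumnFamilyDataResource("users");
// ... route row/column reads and writes through 'users' ...

holder.shutdown();                                       // stops the AstyanaxContext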
| 3,286 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/resources/AstyanaxThriftDataTableResource.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.resources;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Map.Entry;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.SerializerPackage;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.partitioner.Partitioner;
import com.netflix.astyanax.query.RowQuery;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.util.RangeBuilder;
import com.netflix.paas.data.QueryResult;
import com.netflix.paas.data.RowData;
import com.netflix.paas.data.SchemalessRows;
import com.netflix.paas.exceptions.NotFoundException;
import com.netflix.paas.exceptions.PaasException;
import com.netflix.paas.json.JsonObject;
import com.netflix.paas.resources.TableDataResource;
/**
* Column family REST resource
* @author elandau
*
*/
public class AstyanaxThriftDataTableResource implements TableDataResource {
private static Logger LOG = LoggerFactory.getLogger(AstyanaxThriftDataTableResource.class);
private final Keyspace keyspace;
private final ColumnFamily<ByteBuffer, ByteBuffer> columnFamily;
private volatile SerializerPackage serializers;
public AstyanaxThriftDataTableResource(Keyspace keyspace, String name) {
this.keyspace = keyspace;
this.columnFamily = ColumnFamily.newColumnFamily(name, ByteBufferSerializer.get(), ByteBufferSerializer.get());
}
@Override
public QueryResult listRows(String cursor, Integer rowLimit, Integer columnLimit) throws PaasException {
try {
invariant();
// Execute the query
Partitioner partitioner = keyspace.getPartitioner();
Rows<ByteBuffer, ByteBuffer> result = keyspace
.prepareQuery(columnFamily)
.getKeyRange(null, null, cursor != null ? cursor : partitioner.getMinToken(), partitioner.getMaxToken(), rowLimit)
.execute()
.getResult();
// Convert raw data into a simple sparse tree
SchemalessRows.Builder builder = SchemalessRows.builder();
for (Row<ByteBuffer, ByteBuffer> row : result) {
Map<String, String> columns = Maps.newHashMap();
for (Column<ByteBuffer> column : row.getColumns()) {
columns.put(serializers.columnAsString(column.getRawName()), serializers.valueAsString(column.getRawName(), column.getByteBufferValue()));
}
builder.addRow(serializers.keyAsString(row.getKey()), columns);
}
QueryResult dr = new QueryResult();
dr.setSrows(builder.build());
if (!result.isEmpty()) {
dr.setCursor(partitioner.getTokenForKey(Iterables.getLast(result).getKey()));
}
return dr;
        } catch (ConnectionException e) {
            throw new PaasException(
                    String.format("Failed to list rows in column family '%s.%s'",
                            this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
                    e);
        }
    }
@Override
public void truncateRows() {
}
@Override
public QueryResult readRow(String key, Integer columnCount, String startColumn, String endColumn, Boolean reversed) throws PaasException {
invariant();
try {
// Construct the query
RowQuery<ByteBuffer, ByteBuffer> query = keyspace
.prepareQuery(this.columnFamily)
.getRow(serializers.keyAsByteBuffer(key));
RangeBuilder range = new RangeBuilder();
if (columnCount != null && columnCount > 0) {
range.setLimit(columnCount);
}
if (startColumn != null && !startColumn.isEmpty()) {
range.setStart(serializers.columnAsByteBuffer(startColumn));
}
if (endColumn != null && !endColumn.isEmpty()) {
range.setEnd(serializers.columnAsByteBuffer(endColumn));
}
            if (reversed != null) {
                range.setReversed(reversed);
            }
query.withColumnRange(range.build());
// Execute the query
ColumnList<ByteBuffer> result = query.execute().getResult();
// Convert raw data into a simple sparse tree
SchemalessRows.Builder builder = SchemalessRows.builder();
Map<String, String> columns = Maps.newHashMap();
if (!result.isEmpty()) {
for (Column<ByteBuffer> column : result) {
columns.put(serializers.columnAsString(column.getRawName()), serializers.valueAsString(column.getRawName(), column.getByteBufferValue()));
}
builder.addRow(key, columns);
}
QueryResult dr = new QueryResult();
dr.setSrows(builder.build());
return dr;
} catch (ConnectionException e) {
throw new PaasException(
String.format("Failed to read row '%s' in column family '%s.%s'" ,
key, this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
@Override
public void deleteRow(String key) throws PaasException {
invariant();
MutationBatch mb = keyspace.prepareMutationBatch();
mb.withRow(this.columnFamily, serializers.keyAsByteBuffer(key)).delete();
try {
mb.execute();
} catch (ConnectionException e) {
throw new PaasException(
String.format("Failed to update row '%s' in column family '%s.%s'" ,
key, this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
public void updateRow(String key, RowData rowData) throws PaasException {
LOG.info("Update row: " + rowData.toString());
invariant();
MutationBatch mb = keyspace.prepareMutationBatch();
if (rowData.hasSchemalessRows()) {
ColumnListMutation<ByteBuffer> mbRow = mb.withRow(this.columnFamily, serializers.keyAsByteBuffer(key));
for (Entry<String, Map<String, String>> row : rowData.getSrows().getRows().entrySet()) {
for (Entry<String, String> column : row.getValue().entrySet()) {
mbRow.putColumn(serializers.columnAsByteBuffer(column.getKey()),
serializers.valueAsByteBuffer(column.getKey(), column.getValue()));
}
}
}
try {
mb.execute();
} catch (ConnectionException e) {
throw new PaasException(
String.format("Failed to update row '%s' in column family '%s.%s'" ,
key, this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
@Override
public QueryResult readColumn(String key, String column) throws NotFoundException, PaasException {
invariant();
try {
Column<ByteBuffer> result = keyspace
.prepareQuery(this.columnFamily)
.getRow(serializers.keyAsByteBuffer(key))
.getColumn(serializers.columnAsByteBuffer(column))
.execute()
.getResult();
// Convert raw data into a simple sparse tree
SchemalessRows.Builder builder = SchemalessRows.builder();
Map<String, String> columns = Maps.newHashMap();
columns.put(serializers.columnAsString(result.getRawName()), serializers.valueAsString(result.getRawName(), result.getByteBufferValue()));
builder.addRow(key, columns);
QueryResult dr = new QueryResult();
dr.setSrows(builder.build());
return dr;
} catch (com.netflix.astyanax.connectionpool.exceptions.NotFoundException e) {
throw new NotFoundException(
"column",
String.format("%s.%s.%s.%s", key, column, this.keyspace.getKeyspaceName(), this.columnFamily.getName()));
} catch (ConnectionException e) {
throw new PaasException(
String.format("Failed to read row '%s' in column family '%s.%s'" ,
key, this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
@Override
public void updateColumn(String key, String column, String value) throws NotFoundException, PaasException {
LOG.info("Update row");
invariant();
MutationBatch mb = keyspace.prepareMutationBatch();
ColumnListMutation<ByteBuffer> mbRow = mb.withRow(this.columnFamily, serializers.keyAsByteBuffer(key));
mbRow.putColumn(serializers.columnAsByteBuffer(column),
serializers.valueAsByteBuffer(column, value));
try {
mb.execute();
} catch (ConnectionException e) {
throw new PaasException(
String.format("Failed to update row '%s' in column family '%s.%s'" ,
key, this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
@Override
public void deleteColumn(String key, String column) throws PaasException {
LOG.info("Update row");
invariant();
MutationBatch mb = keyspace.prepareMutationBatch();
ColumnListMutation<ByteBuffer> mbRow = mb.withRow(this.columnFamily, serializers.keyAsByteBuffer(key));
mbRow.deleteColumn(serializers.columnAsByteBuffer(column));
try {
mb.execute();
} catch (ConnectionException e) {
throw new PaasException(
String.format("Failed to update row '%s' in column family '%s.%s'" ,
key, this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
private void invariant() throws PaasException {
if (this.serializers == null)
refreshSerializers();
}
private void refreshSerializers() throws PaasException {
try {
this.serializers = this.keyspace.getSerializerPackage(this.columnFamily.getName(), true);
} catch (Exception e) {
LOG.error("Failed to get serializer package for column family '{}.{}'", new Object[]{keyspace.getKeyspaceName(), this.columnFamily.getName(), e});
throw new PaasException(
String.format("Failed to get serializer package for column family '%s.%s' in keyspace",
this.keyspace.getKeyspaceName(), this.columnFamily.getName()),
e);
}
}
@Override
@POST
@Path("{db}/{table}")
public void updateRow(@PathParam("db") String db,
@PathParam("table") String table, JsonObject rowData)
throws PaasException {
// TODO Auto-generated method stub
}
}
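A sketch of direct (non-REST) use, assuming 'keyspace' is an already-started Astyanax Keyspace client and that a column family named "users" exists; the key, column, and value are illustrative.

AstyanaxThriftDataTableResource table = new AstyanaxThriftDataTableResource(keyspace, "users");

// Upsert a single column, then read the row back.
table.updateColumn("user-123", "email", "jdoe@example.com");
QueryResult row = table.readRow("user-123", 10, null, null, false);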
| 3,287 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/resources/CassandraClusterHolder.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.resources;
import java.util.concurrent.ConcurrentMap;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.Slf4jConnectionPoolMonitorImpl;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.paas.exceptions.AlreadyExistsException;
import com.netflix.paas.exceptions.NotFoundException;
/**
* Tracks accessible keyspaces for this cluster
*
* @author elandau
*/
public class CassandraClusterHolder {
private final String clusterName;
private final ConcurrentMap<String, CassandraKeyspaceHolder> keyspaces = Maps.newConcurrentMap();
public CassandraClusterHolder(String clusterName) {
this.clusterName = clusterName;
}
/**
     * Register a keyspace so that a client is created for it and it becomes accessible to
     * this instance
*
* @param keyspaceName
* @throws AlreadyExistsException
*/
public synchronized void registerKeyspace(String keyspaceName) throws AlreadyExistsException {
Preconditions.checkNotNull(keyspaceName);
if (keyspaces.containsKey(keyspaceName)) {
throw new AlreadyExistsException("keyspace", keyspaceName);
}
CassandraKeyspaceHolder keyspace = new CassandraKeyspaceHolder(new AstyanaxContext.Builder()
.forCluster(clusterName)
.forKeyspace(keyspaceName)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
.setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN)
.setDiscoveryDelayInSeconds(60000))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(
clusterName + "_" + keyspaceName)
.setSeeds("localhost:9160"))
.withConnectionPoolMonitor(new Slf4jConnectionPoolMonitorImpl())
.buildKeyspace(ThriftFamilyFactory.getInstance()));
try {
keyspace.initialize();
}
finally {
keyspaces.put(keyspaceName, keyspace);
}
}
/**
* Unregister a keyspace so that it is no longer accessible to this instance
* @param keyspaceName
*/
public void unregisterKeyspace(String keyspaceName) {
Preconditions.checkNotNull(keyspaceName);
CassandraKeyspaceHolder keyspace = keyspaces.remove(keyspaceName);
if (keyspace != null) {
keyspace.shutdown();
}
}
/**
* Get the Keyspace holder for the specified keyspace name
*
* @param keyspaceName
* @return
* @throws NotFoundException
*/
public CassandraKeyspaceHolder getKeyspace(String keyspaceName) throws NotFoundException {
Preconditions.checkNotNull(keyspaceName);
CassandraKeyspaceHolder keyspace = keyspaces.get(keyspaceName);
if (keyspace == null)
throw new NotFoundException("keyspace", keyspaceName);
return keyspace;
}
public String getClusterName() {
return this.clusterName;
}
public void shutdown() {
// TODO
}
public void initialize() {
// TODO
}
}
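A usage sketch; the cluster and keyspace names are illustrative, and note that registerKeyspace above hard-codes the seed list to localhost:9160.

CassandraClusterHolder cluster = new CassandraClusterHolder("test_cluster");
cluster.initialize();

cluster.registerKeyspace("ks1");                          // builds and starts a keyspace client
CassandraKeyspaceHolder ks = cluster.getKeyspace("ks1");  // throws NotFoundException if not registered
// ... register column families on 'ks' and serve data requests ...

cluster.unregisterKeyspace("ks1");                        // shuts the keyspace client down
cluster.shutdown();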
| 3,288 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/resources
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/resources/admin/AstyanaxThriftClusterAdminResource.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.resources.admin;
import java.util.Collection;
import java.util.Properties;
import java.util.concurrent.ScheduledExecutorService;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.persistence.PersistenceException;
import javax.ws.rs.PathParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.eventbus.EventBus;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.name.Named;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.paas.cassandra.admin.CassandraClusterAdminResource;
import com.netflix.paas.cassandra.discovery.ClusterDiscoveryService;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.cassandra.entity.ColumnFamilyEntity;
import com.netflix.paas.cassandra.entity.KeyspaceEntity;
import com.netflix.paas.cassandra.events.ColumnFamilyDeleteEvent;
import com.netflix.paas.cassandra.events.ColumnFamilyUpdateEvent;
import com.netflix.paas.cassandra.events.KeyspaceUpdateEvent;
import com.netflix.paas.cassandra.keys.ClusterKey;
import com.netflix.paas.cassandra.keys.ColumnFamilyKey;
import com.netflix.paas.cassandra.keys.KeyspaceKey;
import com.netflix.paas.cassandra.provider.ClusterClientProvider;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoProvider;
import com.netflix.paas.dao.astyanax.DaoKeys;
import com.netflix.paas.exceptions.NotFoundException;
import com.netflix.paas.exceptions.PaasException;
/**
* Implementation of a Cassandra Cluster Admin interface using Astyanax and Thrift.
 * Since this is the admin interface, only a single connection to the cluster is actually needed.
*
* @author elandau
*/
public class AstyanaxThriftClusterAdminResource implements CassandraClusterAdminResource {
private static final Logger LOG = LoggerFactory.getLogger(AstyanaxThriftClusterAdminResource.class);
private final ClusterKey clusterKey;
private final Cluster cluster;
private final DaoProvider daoProvider;
private final EventBus eventBus;
@Inject
public AstyanaxThriftClusterAdminResource(
EventBus eventBus,
DaoProvider daoProvider,
@Named("tasks") ScheduledExecutorService taskExecutor,
ClusterDiscoveryService discoveryService,
ClusterClientProvider clusterProvider,
@Assisted ClusterKey clusterKey) {
this.clusterKey = clusterKey;
this.cluster = clusterProvider.acquireCluster(clusterKey);
this.daoProvider = daoProvider;
this.eventBus = eventBus;
}
@PostConstruct
public void initialize() {
}
@PreDestroy
public void shutdown() {
}
@Override
public CassandraClusterEntity getClusterDetails() throws PersistenceException {
return daoProvider.getDao(DaoKeys.DAO_CASSANDRA_CLUSTER_ENTITY).read(clusterKey.getCanonicalName());
}
@Override
public KeyspaceEntity getKeyspace(String keyspaceName) throws PersistenceException {
Dao<KeyspaceEntity> dao = daoProvider.getDao(DaoKeys.DAO_KEYSPACE_ENTITY);
return dao.read(keyspaceName);
}
@Override
public void createKeyspace(KeyspaceEntity keyspace) throws PaasException {
LOG.info("Creating keyspace '{}'", new Object[] {keyspace.getName()});
Preconditions.checkNotNull(keyspace, "Missing keyspace entity definition");
Properties props = new Properties();
props.putAll(getDefaultKeyspaceProperties());
if (keyspace.getOptions() != null) {
props.putAll(keyspace.getOptions());
}
props.setProperty("name", keyspace.getName());
keyspace.setClusterName(clusterKey.getClusterName());
try {
cluster.createKeyspace(props);
eventBus.post(new KeyspaceUpdateEvent(new KeyspaceKey(clusterKey, keyspace.getName())));
} catch (ConnectionException e) {
throw new PaasException(String.format("Error creating keyspace '%s' from cluster '%s'", keyspace.getName(), clusterKey.getClusterName()), e);
}
}
@Override
public void updateKeyspace(@PathParam("keyspace") String keyspaceName, KeyspaceEntity keyspace) throws PaasException {
try {
if (keyspace.getOptions() == null) {
return; // Nothing to do
}
// Add them as existing values to the properties object
Properties props = new Properties();
props.putAll(cluster.getKeyspaceProperties(keyspaceName));
props.putAll(keyspace.getOptions());
props.setProperty("name", keyspace.getName());
keyspace.setClusterName(clusterKey.getClusterName());
cluster.updateKeyspace(props);
eventBus.post(new KeyspaceUpdateEvent(new KeyspaceKey(clusterKey, keyspace.getName())));
} catch (ConnectionException e) {
throw new PaasException(String.format("Error creating keyspace '%s' from cluster '%s'", keyspace.getName(), clusterKey.getClusterName()), e);
}
}
@Override
public void deleteKeyspace(String keyspaceName) throws PaasException {
LOG.info("Dropping keyspace");
try {
cluster.dropKeyspace(keyspaceName);
} catch (ConnectionException e) {
throw new PaasException(String.format("Error deleting keyspace '%s' from cluster '%s'", keyspaceName, clusterKey.getClusterName()), e);
}
}
@Override
public ColumnFamilyEntity getColumnFamily(String keyspaceName, String columnFamilyName) throws NotFoundException {
Dao<ColumnFamilyEntity> dao = daoProvider.getDao(DaoKeys.DAO_COLUMN_FAMILY_ENTITY);
return dao.read(new ColumnFamilyKey(clusterKey, keyspaceName, columnFamilyName).getCanonicalName());
}
@Override
public void deleteColumnFamily(String keyspaceName, String columnFamilyName) throws PaasException {
LOG.info("Deleting column family: '{}.{}.{}'", new Object[] {clusterKey.getClusterName(), keyspaceName, columnFamilyName});
try {
cluster.dropColumnFamily(keyspaceName, columnFamilyName);
} catch (ConnectionException e) {
throw new PaasException(String.format("Error creating column family '%s.%s' on cluster '%s'",
keyspaceName, columnFamilyName, clusterKey.getClusterName()), e);
}
eventBus.post(new ColumnFamilyDeleteEvent(new ColumnFamilyKey(clusterKey, keyspaceName, columnFamilyName)));
}
@Override
public void createColumnFamily(@PathParam("keyspace") String keyspaceName, ColumnFamilyEntity columnFamily) throws PaasException {
LOG.info("Creating column family: '{}.{}.{}'", new Object[] {clusterKey.getClusterName(), keyspaceName, columnFamily.getName()});
columnFamily.setKeyspaceName(keyspaceName);
columnFamily.setClusterName(clusterKey.getClusterName());
Properties props = new Properties();
props.putAll(getDefaultColumnFamilyProperties());
if (columnFamily.getOptions() != null) {
props.putAll(columnFamily.getOptions());
}
props.setProperty("name", columnFamily.getName());
props.setProperty("keyspace", columnFamily.getKeyspaceName());
try {
cluster.createColumnFamily(props);
eventBus.post(new ColumnFamilyUpdateEvent(new ColumnFamilyKey(new KeyspaceKey(clusterKey, keyspaceName), columnFamily.getName())));
} catch (ConnectionException e) {
throw new PaasException(String.format("Error creating column family '%s.%s' on cluster '%s'",
keyspaceName, columnFamily.getName(), clusterKey.getClusterName()), e);
}
}
@Override
public void updateColumnFamily(@PathParam("keyspace") String keyspaceName, String columnFamilyName, ColumnFamilyEntity columnFamily) throws PaasException {
LOG.info("Updating column family: '{}.{}.{}'", new Object[] {clusterKey.getClusterName(), keyspaceName, columnFamily.getName()});
columnFamily.setKeyspaceName(keyspaceName);
columnFamily.setClusterName(clusterKey.getClusterName());
try {
Properties props = new Properties();
props.putAll(cluster.getColumnFamilyProperties(keyspaceName, columnFamilyName));
if (columnFamily.getOptions() != null) {
props.putAll(columnFamily.getOptions());
}
props.setProperty("name", columnFamily.getName());
props.setProperty("keyspace", columnFamily.getKeyspaceName());
            cluster.updateColumnFamily(props);
eventBus.post(new ColumnFamilyUpdateEvent(new ColumnFamilyKey(new KeyspaceKey(clusterKey, keyspaceName), columnFamily.getName())));
} catch (ConnectionException e) {
throw new PaasException(String.format("Error creating column family '%s.%s' on cluster '%s'",
keyspaceName, columnFamily.getName(), clusterKey.getClusterName()), e);
}
}
@Override
public Collection<ColumnFamilyEntity> listColumnFamilies() {
// TODO Auto-generated method stub
return null;
}
@Override
public Collection<String> listKeyspaceNames() {
// TODO Auto-generated method stub
return null;
}
@Override
public Collection<String> listColumnFamilyNames() {
// TODO Auto-generated method stub
return null;
}
private Properties getDefaultKeyspaceProperties() {
// TODO: Read from configuration
return new Properties();
}
private Properties getDefaultColumnFamilyProperties() {
return new Properties();
}
@Override
public Collection<KeyspaceEntity> listKeyspaces() {
// TODO Auto-generated method stub
return null;
}
}
| 3,289 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/discovery/ClusterDiscoveryService.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.discovery;
import java.util.Collection;
/**
 * Abstraction for a service that keeps track of clusters. These clusters are normally stored
 * in some naming service. The implementation handles any custom exclusions.
*
* @author elandau
*/
public interface ClusterDiscoveryService {
/**
* @return Return the complete list of clusters
*/
public Collection<String> getClusterNames();
/**
* Return the name of this cluster service
* @return
*/
String getName();
}
| 3,290 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/discovery/LocalClusterDiscoveryService.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.discovery;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
import com.google.common.collect.Maps;
import com.netflix.paas.entity.ClusterEntity;
import com.netflix.paas.exceptions.AlreadyExistsException;
public class LocalClusterDiscoveryService implements ClusterDiscoveryService {
private ConcurrentMap<String, ClusterEntity> clusters = Maps.newConcurrentMap();
public void addCluster(ClusterEntity cluster) throws AlreadyExistsException {
if (null != clusters.putIfAbsent(cluster.getName(), cluster)) {
throw new AlreadyExistsException("cluster", cluster.getName());
}
}
public void removeCluster(String clusterName) {
}
@Override
public Collection<String> getClusterNames() {
return clusters.keySet();
}
@Override
public String getName() {
return "localhost";
}
}
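A sketch of registering a cluster with the local discovery service; ClusterEntity's construction API is not shown in this dump, so the builder call below is an assumption.

// Hypothetical ClusterEntity construction.
ClusterEntity cluster = ClusterEntity.builder().withName("local_test").build();

LocalClusterDiscoveryService discovery = new LocalClusterDiscoveryService();
discovery.addCluster(cluster);                            // throws AlreadyExistsException on duplicates
Collection<String> names = discovery.getClusterNames();   // contains "local_test"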
| 3,291 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/admin/CassandraClusterAdminResource.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.admin;
import java.util.Collection;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.cassandra.entity.ColumnFamilyEntity;
import com.netflix.paas.cassandra.entity.KeyspaceEntity;
import com.netflix.paas.exceptions.NotFoundException;
import com.netflix.paas.exceptions.PaasException;
public interface CassandraClusterAdminResource {
@GET
public CassandraClusterEntity getClusterDetails();
@GET
@Path("ks")
public Collection<KeyspaceEntity> listKeyspaces();
@POST
@Path("ks")
public void createKeyspace(KeyspaceEntity keyspace) throws PaasException;
@GET
@Path("ks/{keyspace}")
public KeyspaceEntity getKeyspace(@PathParam("keyspace") String keyspaceName) throws NotFoundException;
@POST
@Path("ks/{keyspace}")
public void updateKeyspace(@PathParam("keyspace") String keyspaceName, KeyspaceEntity keyspace) throws PaasException;
@DELETE
@Path("ks/{keyspace}")
public void deleteKeyspace(@PathParam("keyspace") String keyspaceName) throws PaasException;
@POST
@Path("ks/{keyspace}/cf")
public void createColumnFamily(@PathParam("keyspace") String keyspaceName, ColumnFamilyEntity columnFamily) throws PaasException;
@POST
@Path("ks/{keyspace}/cf/{columnfamily}")
public void updateColumnFamily(@PathParam("keyspace") String keyspaceName, String columnFamilyName, ColumnFamilyEntity columnFamily) throws PaasException;
@GET
@Path("ks/{keyspace}/cf/{columnfamily}")
public ColumnFamilyEntity getColumnFamily(@PathParam("keyspace") String keyspaceName, String columnFamilyName) throws NotFoundException;
@DELETE
@Path("ks/{keyspace}/cf/{columnfamily}")
public void deleteColumnFamily(@PathParam("keyspace") String keyspaceName, String columnFamilyName) throws PaasException;
@GET
@Path("cf")
public Collection<ColumnFamilyEntity> listColumnFamilies();
@GET
@Path("names/ks")
public Collection<String> listKeyspaceNames();
@GET
@Path("names/cf")
public Collection<String> listColumnFamilyNames();
}
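A sketch of driving this interface programmatically; 'resource' is assumed to come from the CassandraClusterAdminResourceFactory in the next file, the option map is assumed to be a Guava ImmutableMap, and the option key is illustrative.

KeyspaceEntity ks = KeyspaceEntity.builder()
        .withName("ks1")
        .withOptions(ImmutableMap.of("strategy_class", "SimpleStrategy"))   // illustrative option
        .build();

resource.createKeyspace(ks);           // creates the keyspace and fires a KeyspaceUpdateEvent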
| 3,292 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/admin/CassandraClusterAdminResourceFactory.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.admin;
import com.netflix.paas.cassandra.keys.ClusterKey;
public interface CassandraClusterAdminResourceFactory {
public CassandraClusterAdminResource get(ClusterKey clusterKey);
}
| 3,293 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/admin/CassandraSystemAdminResource.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.admin;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.netflix.paas.SchemaNames;
import com.netflix.paas.cassandra.discovery.ClusterDiscoveryService;
import com.netflix.paas.cassandra.entity.CassandraClusterEntity;
import com.netflix.paas.cassandra.tasks.ClusterDiscoveryTask;
import com.netflix.paas.dao.Dao;
import com.netflix.paas.dao.DaoProvider;
import com.netflix.paas.exceptions.NotFoundException;
import com.netflix.paas.tasks.TaskManager;
@Path("/v1/cassandra")
public class CassandraSystemAdminResource {
private static final Logger LOG = LoggerFactory.getLogger(CassandraSystemAdminResource.class);
private final Dao<CassandraClusterEntity> dao;
private final CassandraClusterAdminResourceFactory clusterResourceFactory;
private final ClusterDiscoveryService clusterDiscovery;
private final TaskManager taskManager;
private final ConcurrentMap<String, CassandraClusterAdminResource> clusters = Maps.newConcurrentMap();
private static class CassandraClusterEntityToName implements Function<CassandraClusterEntity, String> {
@Override
public String apply(CassandraClusterEntity cluster) {
return cluster.getClusterName();
}
}
@Inject
public CassandraSystemAdminResource(
TaskManager taskManager,
DaoProvider daoProvider,
ClusterDiscoveryService clusterDiscovery,
CassandraClusterAdminResourceFactory clusterResourceFactory) throws Exception {
this.clusterResourceFactory = clusterResourceFactory;
this.dao = daoProvider.getDao(SchemaNames.CONFIGURATION.name(), CassandraClusterEntity.class);
this.clusterDiscovery = clusterDiscovery;
this.taskManager = taskManager;
}
@PostConstruct
public void initialize() {
}
@PreDestroy
public void shutdown() {
}
@Path("clusters/{id}")
public CassandraClusterAdminResource getCluster(@PathParam("id") String clusterName) throws NotFoundException {
CassandraClusterAdminResource resource = clusters.get(clusterName);
if (resource == null) {
throw new NotFoundException(CassandraClusterAdminResource.class, clusterName);
}
return resource;
}
@GET
@Path("clusters")
public Set<String> listClusters() {
return clusters.keySet();
}
@GET
@Path("discover")
public void discoverClusters() {
taskManager.submit(ClusterDiscoveryTask.class);
// Set<String> foundNames = Sets.newHashSet(clusterDiscovery.getClusterNames());
// Map<String, CassandraClusterEntity> current = Maps.uniqueIndex(dao.list(), new CassandraClusterEntityToName());
//
// // Look for new clusters (may contain clusters that are disabled)
// for (String clusterName : Sets.difference(foundNames, current.keySet())) {
//// CassandraClusterEntity entity = CassandraClusterEntity.builder();
// }
//
// // Look for clusters that were removed
// for (String clusterName : Sets.difference(current.keySet(), foundNames)) {
// }
}
@POST
@Path("clusters")
public void refreshClusterList() {
// Map<String, CassandraClusterEntity> newList = Maps.uniqueIndex(dao.list(), new CassandraClusterEntityToName());
//
// // Look for new clusters (may contain clusters that are disabled)
// for (String clusterName : Sets.difference(newList.keySet(), clusters.keySet())) {
// CassandraClusterEntity entity = newList.get(clusterName);
// if (entity.isEnabled()) {
// CassandraClusterAdminResource resource = clusterResourceFactory.get(clusterName);
// if (null == clusters.putIfAbsent(clusterName, resource)) {
// // TODO: Start it
// }
// }
// }
//
// // Look for clusters that were removed
// for (String clusterName : Sets.difference(clusters.keySet(), newList.keySet())) {
// CassandraClusterAdminResource resource = clusters.remove(clusterName);
// if (resource != null) {
// // TODO: Shut it down
// }
// }
//
// // Look for clusters that may have been disabled
// for (String clusterName : Sets.intersection(clusters.keySet(), newList.keySet())) {
// CassandraClusterEntity entity = newList.get(clusterName);
// if (!entity.isEnabled()) {
// CassandraClusterAdminResource resource = clusters.remove(clusterName);
// if (resource != null) {
// // TODO: Shut it down
// }
// }
// }
}
}
| 3,294 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/provider/AstyanaxConnectionPoolMonitorProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import com.netflix.astyanax.connectionpool.ConnectionPoolMonitor;
public interface AstyanaxConnectionPoolMonitorProvider {
public ConnectionPoolMonitor get(String name);
}
| 3,295 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/provider/HostSupplierProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import java.util.List;
import com.google.common.base.Supplier;
import com.netflix.astyanax.connectionpool.Host;
public interface HostSupplierProvider {
public Supplier<List<Host>> getSupplier(String clusterName);
}
| 3,296 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/provider/AstyanaxConnectionPoolConfigurationProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
public interface AstyanaxConnectionPoolConfigurationProvider {
public ConnectionPoolConfiguration get(String name);
}
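
One plausible implementation is sketched below; the class name and tuning values are assumptions for illustration. It builds Astyanax's ConnectionPoolConfigurationImpl keyed by the requested name:

import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.paas.cassandra.provider.AstyanaxConnectionPoolConfigurationProvider;

public class DefaultConnectionPoolConfigurationProvider implements AstyanaxConnectionPoolConfigurationProvider {
    @Override
    public ConnectionPoolConfiguration get(String name) {
        // Start from Astyanax defaults and override only the basics.
        return new ConnectionPoolConfigurationImpl(name)
                .setPort(9160)
                .setMaxConnsPerHost(10);
    }
}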
| 3,297 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/provider/ClusterClientProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import com.netflix.astyanax.Cluster;
import com.netflix.paas.cassandra.keys.ClusterKey;
/**
* Provider for cluster level client. For now the cluster level client is used
* mostly for admin purposes
*
* @author elandau
*
*/
public interface ClusterClientProvider {
/**
* Acquire a Cassandra cluster by name. Must call releaseCluster once done.
* The concrete provider must implement its own reference counting and
* garbage collection to shut down Cluster clients that are no longer in use.
*
* @param clusterName
*/
public Cluster acquireCluster(ClusterKey clusterName);
/**
* Release a Cassandra cluster that was acquired using acquireCluster
*
* @param clusterName
*/
public void releaseCluster(ClusterKey clusterName);
}
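
To make the acquire/release contract concrete, here is a usage sketch; ClusterAdminExample is a hypothetical caller, not part of staash. It pairs acquireCluster with releaseCluster in a finally block so the provider's reference counting stays balanced:

import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.paas.cassandra.keys.ClusterKey;
import com.netflix.paas.cassandra.provider.ClusterClientProvider;

public class ClusterAdminExample {
    private final ClusterClientProvider provider;

    public ClusterAdminExample(ClusterClientProvider provider) {
        this.provider = provider;
    }

    public void listKeyspaces(ClusterKey key) throws Exception {
        Cluster cluster = provider.acquireCluster(key);
        try {
            // Typical admin-style call on the cluster-level client.
            for (KeyspaceDefinition ksDef : cluster.describeKeyspaces()) {
                System.out.println(ksDef.getName());
            }
        } finally {
            // Always release so the reference count can drop back to zero.
            provider.releaseCluster(key);
        }
    }
}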
| 3,298 |
0 |
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra
|
Create_ds/staash/staash-astyanax/src/main/java/com/netflix/paas/cassandra/provider/KeyspaceClientProvider.java
|
/*******************************************************************************
* /***
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
******************************************************************************/
package com.netflix.paas.cassandra.provider;
import com.netflix.astyanax.Keyspace;
import com.netflix.paas.cassandra.keys.KeyspaceKey;
/**
* Abstraction for getting a keyspace. The implementation will handle lifecycle
* management for the keyspace.
*
* @author elandau
*
*/
public interface KeyspaceClientProvider {
/**
* Get a keyspace by name. Will create one if it does not exist. The provider
* internally keeps track of references to the keyspace and automatically removes
* it once releaseKeyspace is called and the reference count drops to 0.
*
* @param schemaName Globally unique schema (keyspace) name
*
* @return A new or previously created keyspace.
*/
public Keyspace acquireKeyspace(String schemaName);
/**
* Get a keyspace by key.
* @param key Key identifying the keyspace to acquire
* @return A new or previously created keyspace.
*/
public Keyspace acquireKeyspace(KeyspaceKey key);
/**
* Release a previously acquired keyspace
* @param schemaName Globally unique schema (keyspace) name
*/
public void releaseKeyspace(String schemaName);
}
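
A brief usage sketch of the acquire/release lifecycle; KeyspaceUsageExample is a hypothetical caller, and the describe call is just a stand-in for real work against the keyspace:

import com.netflix.astyanax.Keyspace;
import com.netflix.paas.cassandra.provider.KeyspaceClientProvider;

public class KeyspaceUsageExample {
    private final KeyspaceClientProvider keyspaces;

    public KeyspaceUsageExample(KeyspaceClientProvider keyspaces) {
        this.keyspaces = keyspaces;
    }

    public String describe(String schemaName) throws Exception {
        // Bumps the provider's reference count, creating the client on first use.
        Keyspace keyspace = keyspaces.acquireKeyspace(schemaName);
        try {
            return keyspace.describeKeyspace().getName();
        } finally {
            // Drops the reference count; the provider disposes the client at zero.
            keyspaces.releaseKeyspace(schemaName);
        }
    }
}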
| 3,299 |