gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
---|---|
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.util.data;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import ghidra.app.plugin.core.datamgr.util.DataTypeUtils;
import ghidra.app.services.DataTypeQueryService;
import ghidra.program.database.data.DataTypeUtilities;
import ghidra.program.database.data.ProgramDataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.CancelledException;
public class DataTypeParser {
public enum AllowedDataTypes {
    /**
     * All data-types are permitted (excluding bitfields)
     */
    ALL,
    /**
     * All data-types, excluding factory data-types are permitted
     */
    DYNAMIC,
    /**
     * All fixed-length data-types and sizable Dynamic (i.e., canSpecifyLength) data-types
     */
    SIZABLE_DYNAMIC,
    /**
     * All fixed-length data-types, sizable Dynamic data-types.
     * In addition a bitfield specification may be specified (e.g., int:2)
     * for use when defining structure and union components only
     * (see {@link ProxyBitFieldDataType}). Parser must be properly constructed
     * with the intended {@link DataTypeParser#destinationDataTypeManager}.
     * If a bitfield is returned special handling is required.
     */
    SIZABLE_DYNAMIC_AND_BITFIELD,
    /**
     * Only fixed-length data-types
     */
    FIXED_LENGTH,
    /**
     * Only fixed-length data-types and string data-types
     */
    STRINGS_AND_FIXED_LENGTH,
    /**
     * Only Enums, Integer types and those Typedefs based on them
     * for use as a bitfield base datatype
     */
    BITFIELD_BASE_TYPE
}
/**
 * <code>ProxyBitFieldDataType</code> acts as a proxy bitfield
 * whose specification may be used when defining a structure or
 * union bitfield. This datatype may not be directly applied to a program.
 */
private static class ProxyBitFieldDataType extends BitFieldDataType {
    /**
     * Construct proxy bitfield datatype for use when defining
     * a structure or union bitfield.
     * @param baseDataType a supported primitive integer data type or TypeDef to such a type.
     * A deep clone of this type will be performed using the specified dataMgr.
     * @param bitSize size of bit-field expressed as number of bits
     * @throws InvalidDataTypeException if specified baseDataType is not permitted
     */
    private ProxyBitFieldDataType(DataType baseDataType, int bitSize)
            throws InvalidDataTypeException {
        super(baseDataType, bitSize);
    }
}
private DataTypeManager sourceDataTypeManager; // preferred manager searched first; may be null
private DataTypeManager destinationDataTypeManager; // manager parsed results are cloned into; may be null
private DataTypeQueryService dataTypeManagerService; // used for global search and user prompting; may be null
private AllowedDataTypes allowedTypes; // constraint applied to the final parsed data type
/**
 * A constructor that does not use the source or destination data type managers. In terms of
 * the source data type manager, this means that all data type managers will be used when
 * resolving data types.
 *
 * @param dataTypeManagerService data-type manager tool service, or null
 * @param allowedTypes constrains which data-types may be parsed
 */
public DataTypeParser(DataTypeQueryService dataTypeManagerService,
        AllowedDataTypes allowedTypes) {
    // sourceDataTypeManager / destinationDataTypeManager intentionally left null
    this.dataTypeManagerService = dataTypeManagerService;
    this.allowedTypes = allowedTypes;
}
/**
 * Constructor
 * @param sourceDataTypeManager preferred source data-type manager, or null
 * @param destinationDataTypeManager target data-type manager, or null
 * @param dataTypeManagerService data-type manager tool service, or null
 * @param allowedTypes constrains which data-types may be parsed
 *
 * @see #DataTypeParser(DataTypeQueryService, AllowedDataTypes)
 */
public DataTypeParser(DataTypeManager sourceDataTypeManager,
        DataTypeManager destinationDataTypeManager,
        DataTypeQueryService dataTypeManagerService, AllowedDataTypes allowedTypes) {
    this.sourceDataTypeManager = sourceDataTypeManager;
    this.destinationDataTypeManager = destinationDataTypeManager;
    this.dataTypeManagerService = dataTypeManagerService;
    this.allowedTypes = allowedTypes;
}
/**
 * Parse a data-type string specification.
 * @param dataTypeString a known data-type name followed by zero or more pointer/array decorations.
 * @return parsed data-type or null if not found
 * @throws InvalidDataTypeException if data-type string is invalid
 * @throws CancelledException parse cancelled through user interaction
 */
public DataType parse(String dataTypeString)
        throws InvalidDataTypeException, CancelledException {
    // No category constraint: all data type managers may be consulted.
    return parse(dataTypeString, (CategoryPath) null);
}
/**
 * Parse a data type string specification with category path. If category is not null,
 * the dataTypeManagerService will not be queried.
 *
 * @param dataTypeString a known data-type name followed by zero or more pointer/array decorations.
 * @param category known path of data-type or null if unknown
 * @return parsed data-type or null if not found
 * @throws InvalidDataTypeException if data type string is invalid
 * @throws CancelledException parse cancelled through user interaction (only if parser
 * constructed with service)
 */
public DataType parse(String dataTypeString, CategoryPath category)
        throws InvalidDataTypeException, CancelledException {
    // Collapse all runs of whitespace to single spaces so downstream splitting is uniform.
    String normalized = dataTypeString.replaceAll("\\s+", " ").trim();

    // The base name is everything up to the first modifier (*, [, :, {).
    String baseName = getBaseString(normalized);
    DataType baseType = getNamedDataType(baseName, category);
    if (baseType == null) {
        throw new InvalidDataTypeException("Valid data-type not specified");
    }

    // Apply any trailing pointer/array/bitfield decorations.
    String decorations = normalized.substring(baseName.length());
    return parseDataTypeModifiers(baseType, decorations);
}
/**
 * Parse a data type string specification using the specified baseDatatype.
 *
 * @param suggestedBaseDataType base data type (may be null), this will be used as the base
 * data-type if its name matches the base name in the specified dataTypeString.
 * @param dataTypeString a base data-type followed by a sequence of zero or more pointer/array
 * decorations to be applied.
 * The string may start with the baseDataType's name.
 * @return parsed data-type or null if not found
 * @throws InvalidDataTypeException if data-type string is invalid
 * @throws CancelledException parse cancelled through user interaction (only if parser
 * constructed with service)
 */
public DataType parse(String dataTypeString, DataType suggestedBaseDataType)
        throws InvalidDataTypeException, CancelledException {
    String normalized = dataTypeString.replaceAll("\\s+", " ").trim();
    String baseName = getBaseString(normalized);
    if (StringUtils.isBlank(baseName)) {
        throw new InvalidDataTypeException("missing base data-type name");
    }

    // Use the suggested type only when its name matches the parsed base name.
    boolean useSuggested =
        suggestedBaseDataType != null && baseName.equals(suggestedBaseDataType.getName());

    DataType baseType;
    if (useSuggested) {
        baseType = suggestedBaseDataType;
        // Re-home the type into the destination manager if it lives elsewhere.
        if (baseType.getDataTypeManager() != destinationDataTypeManager) {
            baseType = baseType.clone(destinationDataTypeManager);
        }
    }
    else {
        baseType = getNamedDataType(baseName, null);
        if (baseType == null) {
            throw new InvalidDataTypeException("valid data-type not specified");
        }
    }

    return parseDataTypeModifiers(baseType, normalized.substring(baseName.length()));
}
/**
 * Throws exception if the data type does not match the specified {@link AllowedDataTypes}.
 *
 * @param dt {@link DataType} to check
 * @param allowedTypes {@link AllowedDataTypes enum} specifying what category of data types are ok
 * @throws InvalidDataTypeException if dt violates the specified allowedTypes
 */
public static void ensureIsAllowableType(DataType dt, AllowedDataTypes allowedTypes)
        throws InvalidDataTypeException {
    // Bitfields are special-cased: they are only ever legal under
    // SIZABLE_DYNAMIC_AND_BITFIELD, regardless of any other constraint.
    if (dt instanceof BitFieldDataType) {
        if (allowedTypes != AllowedDataTypes.SIZABLE_DYNAMIC_AND_BITFIELD) {
            throw new InvalidDataTypeException("Bitfield data-type not allowed");
        }
        return;
    }

    if (allowedTypes == AllowedDataTypes.ALL) {
        return; // everything (other than bitfields, handled above) is acceptable
    }

    if (allowedTypes == AllowedDataTypes.DYNAMIC) {
        if (dt instanceof FactoryDataType) {
            throw new InvalidDataTypeException("Factory data-type not allowed");
        }
        return;
    }

    if (allowedTypes == AllowedDataTypes.SIZABLE_DYNAMIC ||
        allowedTypes == AllowedDataTypes.SIZABLE_DYNAMIC_AND_BITFIELD) {
        if (dt instanceof FactoryDataType) {
            throw new InvalidDataTypeException("Factory data-type not allowed");
        }
        // Dynamic types are only acceptable when their length can be specified.
        if (dt instanceof Dynamic && !((Dynamic) dt).canSpecifyLength()) {
            throw new InvalidDataTypeException("non-sizable data-type not allowed");
        }
        return;
    }

    if (allowedTypes == AllowedDataTypes.FIXED_LENGTH) {
        // A negative length denotes a non-fixed-length type.
        if (dt.getLength() < 0) {
            throw new InvalidDataTypeException("Fixed-length data-type required");
        }
        return;
    }

    if (allowedTypes == AllowedDataTypes.STRINGS_AND_FIXED_LENGTH) {
        if (dt.getLength() < 0 && !(dt instanceof AbstractStringDataType)) {
            throw new InvalidDataTypeException("Fixed-length or string data-type required");
        }
        return;
    }

    if (allowedTypes == AllowedDataTypes.BITFIELD_BASE_TYPE) {
        if (!BitFieldDataType.isValidBaseDataType(dt)) {
            throw new InvalidDataTypeException("Enum or integer derived data-type required");
        }
        return;
    }

    // Defensive: an AllowedDataTypes constant added later without a branch above.
    throw new InvalidDataTypeException(
        "Unknown data type allowance specified: " + allowedTypes);
}
/**
 * Applies the parsed modifier pieces (pointers, arrays, bitfields, element sizes) to the
 * named base data type, in the order produced by {@link #parseModifiers(String)}.
 * Order matters: each pointer/array wraps the type built so far.
 *
 * @param namedDataType resolved base data type
 * @param dataTypeModifiers raw modifier suffix of the original specification string
 * @return fully decorated data type
 * @throws InvalidDataTypeException if a modifier is invalid or disallowed
 */
private DataType parseDataTypeModifiers(DataType namedDataType, String dataTypeModifiers)
        throws InvalidDataTypeException {
    List<DtPiece> modifiers = parseModifiers(dataTypeModifiers);
    DataType dt = namedDataType;
    // Tracks the element length to use should an array modifier be encountered.
    int elementLength = dt.getLength();
    try {
        for (DtPiece modifier : modifiers) {
            if (modifier instanceof PointerSpecPiece) {
                int pointerSize = ((PointerSpecPiece) modifier).getPointerSize();
                dt = new PointerDataType(dt, pointerSize, destinationDataTypeManager);
                elementLength = dt.getLength();
            }
            else if (modifier instanceof ElementSizeSpecPiece) {
                // Only honored when the current type has no usable length of its own
                // (e.g., a sizable Dynamic base type).
                if (elementLength <= 0) {
                    elementLength = ((ElementSizeSpecPiece) modifier).getElementSize();
                }
            }
            else if (modifier instanceof ArraySpecPiece) {
                int elementCount = ((ArraySpecPiece) modifier).getElementCount();
                dt = createArrayDataType(dt, elementLength, elementCount);
                elementLength = dt.getLength();
            }
            else if (modifier instanceof BitfieldSpecPiece) {
                // Bitfields are only legal under SIZABLE_DYNAMIC_AND_BITFIELD and require a
                // destination manager for the deep clone performed by ProxyBitFieldDataType.
                if (allowedTypes != AllowedDataTypes.SIZABLE_DYNAMIC_AND_BITFIELD) {
                    throw new InvalidDataTypeException("Bitfield not permitted");
                }
                if (destinationDataTypeManager == null) {
                    throw new AssertException(
                        "Bitfields require destination datatype manager to be specified");
                }
                int bitSize = ((BitfieldSpecPiece) modifier).getBitSize();
                dt = new ProxyBitFieldDataType(dt.clone(destinationDataTypeManager), bitSize);
            }
        }
    }
    catch (IllegalArgumentException e) {
        // Constructor argument rejections (e.g., bad pointer/array parameters) surface
        // to the caller as parse failures.
        throw new InvalidDataTypeException(e.getMessage());
    }
    ensureIsAllowableType(dt, allowedTypes);
    return dt;
}
/**
 * Tokenizes the modifier suffix of a data type specification into typed pieces.
 * Consecutive array specifications are inserted in reverse order (each new one goes in
 * front of the group) so that "[2][3]" builds the array types in the order data-type
 * creation requires. A bitfield piece (":n") is terminal: nothing may follow it.
 *
 * @param dataTypeModifiers raw modifier suffix (may be empty)
 * @return ordered list of modifier pieces
 * @throws InvalidDataTypeException if a piece is malformed or follows a bitfield
 */
private List<DtPiece> parseModifiers(String dataTypeModifiers) throws InvalidDataTypeException {
    // Index where the current run of consecutive array specs began, or -1 when not in one.
    int arrayStartIndex = -1;
    List<DtPiece> modifiers = new ArrayList<>();
    boolean terminalModifier = false;
    for (String piece : splitDataTypeModifiers(dataTypeModifiers)) {
        piece = piece.trim();
        if (terminalModifier) {
            throw new InvalidDataTypeException("Invalid data type modifier");
        }
        if (piece.startsWith("*")) {
            modifiers.add(new PointerSpecPiece(piece));
            arrayStartIndex = -1; // pointer breaks any array run
        }
        else if (piece.startsWith("[")) {
            // group of array specifications are reversed for proper data-type creation order
            ArraySpecPiece arraySpec = new ArraySpecPiece(piece);
            if (arrayStartIndex >= 0) {
                modifiers.add(arrayStartIndex, arraySpec);
            }
            else {
                arrayStartIndex = modifiers.size();
                modifiers.add(arraySpec);
            }
        }
        else if (piece.startsWith(":")) {
            terminalModifier = true; // bitfield must be the last modifier
            modifiers.add(new BitfieldSpecPiece(piece));
        }
        else if (piece.startsWith("{")) {
            // {n} indicates the size of an array element when the base data type is dynamic.
            modifiers.add(new ElementSizeSpecPiece(piece));
            arrayStartIndex = -1;
        }
        // NOTE(review): pieces not starting with a recognized modifier character are
        // silently ignored here.
    }
    return modifiers;
}
/**
 * Resolves a base data type name to a data type, searching the source manager first and
 * then (when no category constrains the search) all managers via the tool service.
 * The result is always cloned into the destination manager.
 *
 * @param baseName data type name without modifiers
 * @param category known category path, or null to search broadly
 * @return resolved data type (never null)
 * @throws InvalidDataTypeException if the name cannot be resolved
 * @throws CancelledException if the user cancels an interactive selection
 */
private DataType getNamedDataType(String baseName, CategoryPath category)
        throws InvalidDataTypeException, CancelledException {
    List<DataType> results = new ArrayList<>();
    DataType dt = findDataType(sourceDataTypeManager, baseName, category, results);
    if (dt != null) {
        return dt; // found a direct match
    }
    //
    // We now either have no results or multiple results
    //
    if (results.isEmpty() && DataType.DEFAULT.getDisplayName().equals(baseName)) {
        dt = DataType.DEFAULT;
    }
    else if (category == null) {
        // Unconstrained search: consult every data type manager and, if needed, the user.
        dt = findDataTypeInAllDataTypeManagers(baseName, results);
    }
    if (dt == null) {
        String msg = "Unrecognized data type of \"" + baseName + "\"";
        throw new InvalidDataTypeException(msg);
    }
    return dt.clone(destinationDataTypeManager);
}
/**
 * Attempts to resolve a name using every known data type manager: first by exact-name
 * query through the service, then by heuristic disambiguation, and finally by prompting
 * the user.
 *
 * @param baseName data type name
 * @param results candidate matches accumulated so far (may be added to)
 * @return resolved data type, or null if nothing was found
 * @throws CancelledException if the user cancels the interactive selection
 */
private DataType findDataTypeInAllDataTypeManagers(String baseName, List<DataType> results)
        throws CancelledException {
    if (results.isEmpty() && dataTypeManagerService != null) {
        results.addAll(
            DataTypeUtils.getExactMatchingDataTypes(baseName, dataTypeManagerService));
    }
    if (results.isEmpty()) {
        return null;
    }
    // try to heuristically pick the right type
    DataType dt = pickFromPossibleEquivalentDataTypes(results);
    if (dt != null) {
        return dt;
    }
    // give up and ask the user
    return proptUserForType(baseName);
}
/**
 * Asks the tool service (which may present a chooser to the user) for the data type.
 *
 * NOTE(review): method name contains a typo ("propt" -> "prompt"); it is private, so it
 * can be renamed in a follow-up that also updates its single caller.
 *
 * @param baseName data type name
 * @return chosen data type, or null when no service is available
 * @throws CancelledException when the service returns null — treated here as the user
 * cancelling the selection (presumably; the service contract is not visible — confirm)
 */
private DataType proptUserForType(String baseName) throws CancelledException {
    if (dataTypeManagerService == null) {
        return null;
    }
    DataType dt = dataTypeManagerService.getDataType(baseName);
    if (dt == null) {
        throw new CancelledException();
    }
    return dt;
}
/**
 * Searches a single data type manager for the named type, falling back to the built-in
 * manager when no manager is given or nothing is found.
 *
 * @param dtm manager to search, or null to search only the built-ins
 * @param baseName data type name
 * @param category category constraint, or null to search by name (with C-primitive handling)
 * @param list accumulates candidate matches when no unique result exists
 * @return the unique match, or null when there are zero or multiple candidates
 */
private DataType findDataType(DataTypeManager dtm, String baseName, CategoryPath category,
        List<DataType> list) {
    DataTypeManager builtInDTM = BuiltInDataTypeManager.getDataTypeManager();
    if (dtm == null) {
        // no DTM specified--try the built-ins
        return findDataType(builtInDTM, baseName, category, list);
    }
    if (category != null) {
        DataType dt = dtm.getDataType(category, baseName);
        if (dt != null) {
            list.add(dt);
            return dt;
        }
    }
    else {
        // handle C primitives (e.g. long long, unsigned long int, etc.)
        DataType dataType = DataTypeUtilities.getCPrimitiveDataType(baseName);
        if (dataType != null) {
            return dataType.clone(dtm);
        }
        dtm.findDataTypes(baseName, list);
        if (list.size() == 1) {
            return list.get(0); // unambiguous — use it directly
        }
    }
    // nothing found--try the built-ins if we haven't yet
    if (list.isEmpty() && dtm != builtInDTM) {
        return findDataType(builtInDTM, baseName, category, list);
    }
    return null;
}
// ultimately, if one of the types is from the program or the builtin types, *and* the rest of
// the data types are equivalent to that one, then this method returns that data type
private static DataType pickFromPossibleEquivalentDataTypes(List<DataType> dtList) {
    DataType programDataType = null;
    // see if one of the data types belongs to the program or the built in types, where the
    // program is more important than the builtin
    for (DataType dataType : dtList) {
        DataTypeManager manager = dataType.getDataTypeManager();
        if (manager instanceof BuiltInDataTypeManager) {
            // remember the built-in, but keep scanning for a program type (no break)
            programDataType = dataType;
        }
        else if (manager instanceof ProgramDataTypeManager) {
            // a program-owned type always wins — stop looking
            programDataType = dataType;
            break;
        }
    }
    if (programDataType == null) {
        return null;
    }
    for (DataType dataType : dtList) {
        // just one non-matching case means that we can't use the program's data type
        if (!programDataType.isEquivalent(dataType)) {
            return null;
        }
    }
    return programDataType;
}
/**
 * Returns the base type name portion of a specification: everything before the first
 * modifier character (*, [, :, {) that appears outside of template angle brackets.
 * When no modifier is present the whole string is returned unchanged.
 *
 * @param dataTypeString normalized data type specification
 * @return trimmed base name prefix, or the full string if no modifier exists
 */
private static String getBaseString(String dataTypeString) {
    int templateDepth = 0;
    for (int i = 0; i < dataTypeString.length(); i++) {
        char c = dataTypeString.charAt(i);
        if (c == '<') {
            ++templateDepth;
        }
        else if (c == '>') {
            --templateDepth;
        }
        if (templateDepth != 0) {
            continue; // inside template brackets — modifier characters are part of the name
        }
        boolean isModifier = (c == '*') || (c == '[') || (c == ':') || (c == '{');
        if (isModifier) {
            return dataTypeString.substring(0, i).trim();
        }
    }
    return dataTypeString;
}
/**
 * Splits the modifier suffix into pieces, each beginning with its modifier character
 * (*, [, :, {). For example {@code "*32[2]:4"} yields {@code ["*32", "[2]", ":4"]}.
 *
 * @param dataTypeModifiers raw modifier suffix (may be empty)
 * @return pieces in order of appearance; empty array when there are no modifiers
 */
private static String[] splitDataTypeModifiers(String dataTypeModifiers) {
    // Join a bitfield colon to its size digits: "int : 2" arrives here as " : 2".
    // Collapse ":<whitespace>" to ":" so the size stays attached to the colon.
    // (The previous replacement string was "", which deleted the ':' itself and
    // silently dropped the bitfield specification for inputs like "int : 2".)
    dataTypeModifiers = dataTypeModifiers.replaceAll(":[ \\t]+", ":");
    if (dataTypeModifiers.length() == 0) {
        return new String[0];
    }
    List<String> list = new ArrayList<>();
    int startIndex = 0;
    int nextIndex = 1;
    while (nextIndex < dataTypeModifiers.length()) {
        char c = dataTypeModifiers.charAt(nextIndex);
        if (c == '*' || c == '[' || c == ':' || c == '{') {
            // each modifier character starts a new piece
            list.add(dataTypeModifiers.substring(startIndex, nextIndex));
            startIndex = nextIndex;
        }
        ++nextIndex;
    }
    list.add(dataTypeModifiers.substring(startIndex, nextIndex));
    String[] pieces = new String[list.size()];
    list.toArray(pieces);
    return pieces;
}
/**
 * Builds an array of the given base type.
 *
 * @param baseDataType element data type (arrays of this type are created as-is; any
 * TypeDef resolution is handled by ArrayDataType itself)
 * @param elementLength length of one element in bytes; must be positive
 * @param elementCount number of elements (0 permitted for an empty/flexible array spec)
 * @return the new array data type
 * @throws InvalidDataTypeException if the element length is not positive (e.g., an
 * unsized dynamic base type with no explicit {n} element size supplied)
 */
private DataType createArrayDataType(DataType baseDataType, int elementLength, int elementCount)
        throws InvalidDataTypeException {
    // NOTE: the previous version stripped TypeDefs into a local that was never used
    // (dead code) — removed.
    if (elementLength <= 0) {
        throw new InvalidDataTypeException(
            "Only a datatype with a positive size can be used for an array: " +
                baseDataType.getName() + "; " + elementLength);
    }
    return new ArrayDataType(baseDataType, elementCount, elementLength,
        destinationDataTypeManager);
}
/**
 * Parses a decimal or hexadecimal ("0x"/"0X" prefixed) size value.
 *
 * @param size textual size; surrounding whitespace is ignored
 * @return parsed integer value
 * @throws NumberFormatException if the text is blank or not a valid number
 */
private static int parseSize(String size) {
    if (StringUtils.isBlank(size)) {
        throw new NumberFormatException();
    }
    String text = size.trim();
    boolean isHex = StringUtils.startsWithIgnoreCase(text, "0x");
    return isHex
            ? Integer.parseInt(text.substring(2), 16)
            : Integer.parseInt(text);
}
/** Marker interface for parsed modifier pieces (pointer, array, bitfield, element size). */
private static interface DtPiece {
    // dummy interface so we don't have to use Object in the list container
}
/** Parsed ":n" bitfield specification, where n (decimal or 0x-hex) is the bit count. */
private static class BitfieldSpecPiece implements DtPiece {
    int bitSize; // number of bits; zero is permitted

    BitfieldSpecPiece(String piece) throws InvalidDataTypeException {
        if (piece.startsWith(":")) {
            String bitSizeStr = piece.substring(1);
            try {
                bitSize = parseSize(bitSizeStr);
                if (bitSize >= 0) {
                    return; // valid specification
                }
            }
            catch (NumberFormatException e) {
                // handled below
            }
        }
        throw new InvalidDataTypeException("Invalid bitfield specification: " + piece);
    }

    int getBitSize() {
        return bitSize;
    }
}
/** Parsed "[n]" array specification; "[]" is treated as a zero-element array. */
private static class ArraySpecPiece implements DtPiece {
    int elementCount;

    ArraySpecPiece(String piece) throws InvalidDataTypeException {
        if (piece.startsWith("[") && piece.endsWith("]")) {
            try {
                String elementCountStr = piece.substring(1, piece.length() - 1);
                if (elementCountStr.length() == 0) {
                    // treat empty array spec same as 0
                    // consumer may need to handle resulting array as a pointer (e.g., parameter)
                    elementCount = 0;
                }
                else {
                    elementCount = parseSize(elementCountStr);
                }
                return;
            }
            catch (NumberFormatException e) {
                // handled below
            }
        }
        throw new InvalidDataTypeException("Invalid array specification: " + piece);
    }

    int getElementCount() {
        return elementCount;
    }
}
/**
 * Parsed "*" or "*n" pointer specification. The optional n is the pointer size in BITS;
 * it must be a positive multiple of 8, at most 64 (i.e., 1..8 bytes). A bare "*" leaves
 * the size at -1, meaning default pointer size.
 */
private static class PointerSpecPiece implements DtPiece {
    int pointerSize = -1; // size in bytes after conversion; -1 = use default

    PointerSpecPiece(String piece) throws InvalidDataTypeException {
        if (!piece.startsWith("*")) {
            throw new InvalidDataTypeException("Invalid pointer specification: " + piece);
        }
        if (piece.length() == 1) {
            return; // plain "*" — default-sized pointer
        }
        try {
            pointerSize = Integer.parseInt(piece.substring(1));
        }
        catch (NumberFormatException e) {
            throw new InvalidDataTypeException("Invalid pointer specification: " + piece);
        }
        // Convert bit count to bytes, rejecting non-byte-multiples and sizes outside 1..8 bytes.
        int mod = pointerSize % 8;
        pointerSize = pointerSize / 8;
        if (mod != 0 || pointerSize <= 0 || pointerSize > 8) {
            throw new InvalidDataTypeException("Invalid pointer size: " + piece);
        }
    }

    int getPointerSize() {
        return pointerSize;
    }
}
/** Parsed "{n}" element-size specification used when the base data type is dynamic. */
private static class ElementSizeSpecPiece implements DtPiece {
    int elementSize; // element size in bytes

    ElementSizeSpecPiece(String piece) throws InvalidDataTypeException {
        if (piece.startsWith("{") && piece.endsWith("}")) {
            String elementSizeStr = piece.substring(1, piece.length() - 1);
            try {
                elementSize = parseSize(elementSizeStr);
                return;
            }
            catch (NumberFormatException e) {
                // handled below
            }
        }
        throw new InvalidDataTypeException(
            "Invalid array element size specification: " + piece);
    }

    int getElementSize() {
        return elementSize;
    }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.igfs.common;
import org.apache.ignite.*;
import org.apache.ignite.igfs.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.jetbrains.annotations.*;
import java.io.*;
import java.util.*;
import static org.apache.ignite.internal.igfs.common.IgfsIpcCommand.*;
/**
* Implementation of IGFS client message marshaller.
*/
public class IgfsMarshaller {
/** Packet header size, in bytes: 8 (request ID) + 4 (command ordinal) + 12 command-specific area. */
public static final int HEADER_SIZE = 24;
/**
 * Creates new header with given request ID and command. Bytes 12..23 (the
 * command-specific area) are left zeroed.
 *
 * @param reqId Request ID.
 * @param cmd Command.
 * @return Created header.
 */
public static byte[] createHeader(long reqId, IgfsIpcCommand cmd) {
    assert cmd != null;
    byte[] hdr = new byte[HEADER_SIZE];
    U.longToBytes(reqId, hdr, 0);   // bytes 0..7: request ID
    U.intToBytes(cmd.ordinal(), hdr, 8); // bytes 8..11: command ordinal
    return hdr;
}
/**
 * Clears the given header buffer and fills it with the request ID and command,
 * allowing buffer reuse (unlike {@link #createHeader(long, IgfsIpcCommand)}).
 *
 * @param hdr Header buffer to fill; must be HEADER_SIZE bytes.
 * @param reqId Request ID.
 * @param cmd Command.
 * @return The same header buffer, filled.
 */
public static byte[] fillHeader(byte[] hdr, long reqId, IgfsIpcCommand cmd) {
    assert cmd != null;
    Arrays.fill(hdr, (byte)0); // reset command-specific area from any previous use
    U.longToBytes(reqId, hdr, 0);
    U.intToBytes(cmd.ordinal(), hdr, 8);
    return hdr;
}
/**
 * Marshals a message to the output, preceded by the given pre-built header.
 * Field write order per command must stay in sync with {@link #unmarshall}.
 *
 * @param msg Message.
 * @param hdr Message header (HEADER_SIZE bytes; mutated in place for stream-control commands).
 * @param out Output.
 * @throws IgniteCheckedException If failed.
 */
public void marshall(IgfsMessage msg, byte[] hdr, ObjectOutput out) throws IgniteCheckedException {
    assert hdr != null;
    assert hdr.length == HEADER_SIZE;
    try {
        switch (msg.command()) {
            case HANDSHAKE: {
                out.write(hdr);
                IgfsHandshakeRequest req = (IgfsHandshakeRequest)msg;
                U.writeString(out, req.gridName());
                U.writeString(out, req.igfsName());
                U.writeString(out, req.logDirectory());
                break;
            }
            case STATUS: {
                // header only — no payload
                out.write(hdr);
                break;
            }
            case EXISTS:
            case INFO:
            case PATH_SUMMARY:
            case UPDATE:
            case RENAME:
            case DELETE:
            case MAKE_DIRECTORIES:
            case LIST_PATHS:
            case LIST_FILES:
            case AFFINITY:
            case SET_TIMES:
            case OPEN_READ:
            case OPEN_APPEND:
            case OPEN_CREATE: {
                out.write(hdr);
                IgfsPathControlRequest req = (IgfsPathControlRequest)msg;
                writePath(out, req.path());
                writePath(out, req.destinationPath());
                out.writeBoolean(req.flag());
                out.writeBoolean(req.colocate());
                U.writeStringMap(out, req.properties());
                // Minor optimization.
                if (msg.command() == AFFINITY) {
                    out.writeLong(req.start());
                    out.writeLong(req.length());
                }
                else if (msg.command() == OPEN_CREATE) {
                    out.writeInt(req.replication());
                    out.writeLong(req.blockSize());
                }
                else if (msg.command() == SET_TIMES) {
                    out.writeLong(req.accessTime());
                    out.writeLong(req.modificationTime());
                }
                else if (msg.command() == OPEN_READ && req.flag())
                    out.writeInt(req.sequentialReadsBeforePrefetch());
                break;
            }
            case CLOSE:
            case READ_BLOCK:
            case WRITE_BLOCK: {
                assert msg.command() != WRITE_BLOCK : "WRITE_BLOCK should be marshalled manually.";
                IgfsStreamControlRequest req = (IgfsStreamControlRequest)msg;
                // Stream-control payload rides inside the header's command-specific area.
                U.longToBytes(req.streamId(), hdr, 12);
                if (msg.command() == READ_BLOCK)
                    U.intToBytes(req.length(), hdr, 20);
                out.write(hdr);
                if (msg.command() == READ_BLOCK)
                    out.writeLong(req.position());
                break;
            }
            case CONTROL_RESPONSE: {
                out.write(hdr);
                IgfsControlResponse res = (IgfsControlResponse)msg;
                res.writeExternal(out);
                break;
            }
            default: {
                assert false : "Invalid command: " + msg.command();
                throw new IllegalArgumentException("Failed to marshal message (invalid command): " +
                    msg.command());
            }
        }
    }
    catch (IOException e) {
        throw new IgniteCheckedException("Failed to send message to IGFS data node (is data node up and running?)", e);
    }
}
/**
 * Unmarshals a message from the input; field read order per command must mirror the
 * write order in {@link #marshall}.
 *
 * @param cmd Command.
 * @param hdr Header (already read by the caller; HEADER_SIZE bytes).
 * @param in Input.
 * @return Message.
 * @throws IgniteCheckedException If failed.
 */
public IgfsMessage unmarshall(IgfsIpcCommand cmd, byte[] hdr, ObjectInput in) throws IgniteCheckedException {
    assert hdr != null;
    assert hdr.length == HEADER_SIZE;
    try {
        IgfsMessage msg;
        switch (cmd) {
            case HANDSHAKE: {
                IgfsHandshakeRequest req = new IgfsHandshakeRequest();
                req.gridName(U.readString(in));
                req.igfsName(U.readString(in));
                req.logDirectory(U.readString(in));
                msg = req;
                break;
            }
            case STATUS: {
                msg = new IgfsStatusRequest();
                break;
            }
            case EXISTS:
            case INFO:
            case PATH_SUMMARY:
            case UPDATE:
            case RENAME:
            case DELETE:
            case MAKE_DIRECTORIES:
            case LIST_PATHS:
            case LIST_FILES:
            case SET_TIMES:
            case AFFINITY:
            case OPEN_READ:
            case OPEN_APPEND:
            case OPEN_CREATE: {
                IgfsPathControlRequest req = new IgfsPathControlRequest();
                req.path(readPath(in));
                req.destinationPath(readPath(in));
                req.flag(in.readBoolean());
                req.colocate(in.readBoolean());
                req.properties(U.readStringMap(in));
                // Minor optimization.
                if (cmd == AFFINITY) {
                    req.start(in.readLong());
                    req.length(in.readLong());
                }
                else if (cmd == OPEN_CREATE) {
                    req.replication(in.readInt());
                    req.blockSize(in.readLong());
                }
                else if (cmd == SET_TIMES) {
                    req.accessTime(in.readLong());
                    req.modificationTime(in.readLong());
                }
                else if (cmd == OPEN_READ && req.flag())
                    req.sequentialReadsBeforePrefetch(in.readInt());
                msg = req;
                break;
            }
            case CLOSE:
            case READ_BLOCK:
            case WRITE_BLOCK: {
                IgfsStreamControlRequest req = new IgfsStreamControlRequest();
                // Stream ID and length come out of the header's command-specific area.
                long streamId = U.bytesToLong(hdr, 12);
                req.streamId(streamId);
                // NOTE(review): marshall only writes bytes 20..23 for READ_BLOCK; for other
                // stream commands this presumably reads zeros — confirm against the peer.
                req.length(U.bytesToInt(hdr, 20));
                if (cmd == READ_BLOCK)
                    req.position(in.readLong());
                msg = req;
                break;
            }
            case CONTROL_RESPONSE: {
                IgfsControlResponse res = new IgfsControlResponse();
                res.readExternal(in);
                msg = res;
                break;
            }
            default: {
                assert false : "Invalid command: " + cmd;
                throw new IllegalArgumentException("Failed to unmarshal message (invalid command): " + cmd);
            }
        }
        assert msg != null;
        msg.command(cmd);
        return msg;
    }
    catch (IOException | ClassNotFoundException e) {
        throw new IgniteCheckedException("Failed to unmarshal client message: " + cmd, e);
    }
}
/**
 * Writes IGFS path to given data output. Can write {@code null} values: a presence flag
 * precedes the externalized path.
 *
 * @param out Data output.
 * @param path Path to write, or {@code null}.
 * @throws IOException If write failed.
 */
private void writePath(ObjectOutput out, @Nullable IgfsPath path) throws IOException {
    boolean hasPath = path != null;

    out.writeBoolean(hasPath);

    if (hasPath)
        path.writeExternal(out);
}
/**
 * Reads IGFS path from data input that was written by {@link #writePath(ObjectOutput, org.apache.ignite.igfs.IgfsPath)}
 * method: a presence flag followed, when {@code true}, by the externalized path.
 *
 * @param in Data input.
 * @return Written path or {@code null}.
 */
@Nullable private IgfsPath readPath(ObjectInput in) throws IOException {
    if (!in.readBoolean())
        return null;

    IgfsPath path = new IgfsPath();

    path.readExternal(in);

    return path;
}
/**
 * Writes a possibly-{@code null} string to output as a presence flag followed, when
 * present, by the modified-UTF-8 encoding of the string.
 *
 * NOTE(review): not referenced by the visible code in this class (which uses
 * {@code U.writeString}) — verify callers before removing.
 *
 * @param out Data output.
 * @param str String, or {@code null}.
 * @throws IOException If write failed.
 */
private void writeString(DataOutput out, @Nullable String str) throws IOException {
    if (str == null) {
        out.writeBoolean(false);

        return;
    }

    out.writeBoolean(true);
    out.writeUTF(str);
}
/**
 * Reads a possibly-{@code null} string from input: a presence flag followed, when
 * {@code true}, by a modified-UTF-8 string.
 *
 * @param in Data input.
 * @return Read string or {@code null}.
 * @throws IOException If read failed.
 */
@Nullable private String readString(DataInput in) throws IOException {
    return in.readBoolean() ? in.readUTF() : null;
}
}
|
|
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.physics.box2d;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.BodyDef.BodyType;
import com.badlogic.gdx.utils.Array;
/** A rigid body. These are created via World.CreateBody.
* @author mzechner */
public class Body {
// @off
/*JNI
#include <Box2D/Box2D.h>
*/
/** the address of the native b2Body this wrapper fronts **/
protected long addr;
/** temporary float array used to marshal values across the JNI boundary **/
private final float[] tmp = new float[4];
/** World this body belongs to **/
private final World world;
/** Fixtures of this body **/
private Array<Fixture> fixtures = new Array<Fixture>(2);
/** Joints of this body **/
protected Array<JointEdge> joints = new Array<JointEdge>(2);
/** user data **/
private Object userData;
/** Constructs a new body wrapping the given native address
 * @param world the world this body belongs to
 * @param addr the address of the native b2Body */
protected Body (World world, long addr) {
    this.world = world;
    this.addr = addr;
}
/** Resets this body after fetching it from the {@link World#freeBodies} Pool:
 * adopts the new native address, clears user data, returns all fixtures to the
 * world's fixture pool, and drops joint references. */
protected void reset (long addr) {
    this.addr = addr;
    this.userData = null;
    // return fixtures to the pool before clearing our references to them
    for (int i = 0; i < fixtures.size; i++)
        this.world.freeFixtures.free(fixtures.get(i));
    fixtures.clear();
    this.joints.clear();
}
/** Creates a fixture and attach it to this body. Use this function if you need to set some fixture parameters, like friction.
 * Otherwise you can create the fixture directly from a shape. If the density is non-zero, this function automatically updates
 * the mass of the body. Contacts are not created until the next time step.
 * @param def the fixture definition.
 * @warning This function is locked during callbacks. */
public Fixture createFixture (FixtureDef def) {
    long fixtureAddr = jniCreateFixture(addr, def.shape.addr, def.friction, def.restitution, def.density, def.isSensor,
        def.filter.categoryBits, def.filter.maskBits, def.filter.groupIndex);
    // wrap the native fixture in a pooled Java-side object and register it with the world
    Fixture fixture = this.world.freeFixtures.obtain();
    fixture.reset(this, fixtureAddr);
    this.world.fixtures.put(fixture.addr, fixture);
    this.fixtures.add(fixture);
    return fixture;
}
private native long jniCreateFixture (long addr, long shapeAddr, float friction, float restitution, float density,
boolean isSensor, short filterCategoryBits, short filterMaskBits, short filterGroupIndex); /*
b2Body* body = (b2Body*)addr;
b2Shape* shape = (b2Shape*)shapeAddr;
b2FixtureDef fixtureDef;
fixtureDef.shape = shape;
fixtureDef.friction = friction;
fixtureDef.restitution = restitution;
fixtureDef.density = density;
fixtureDef.isSensor = isSensor;
fixtureDef.filter.maskBits = filterMaskBits;
fixtureDef.filter.categoryBits = filterCategoryBits;
fixtureDef.filter.groupIndex = filterGroupIndex;
return (jlong)body->CreateFixture( &fixtureDef );
*/
/** Creates a fixture from a shape and attach it to this body. This is a convenience function. Use b2FixtureDef if you need to
 * set parameters like friction, restitution, user data, or filtering. If the density is non-zero, this function automatically
 * updates the mass of the body.
 * @param shape the shape to be cloned.
 * @param density the shape density (set to zero for static bodies).
 * @warning This function is locked during callbacks. */
public Fixture createFixture (Shape shape, float density) {
    long fixtureAddr = jniCreateFixture(addr, shape.addr, density);
    // wrap the native fixture in a pooled Java-side object and register it with the world
    Fixture fixture = this.world.freeFixtures.obtain();
    fixture.reset(this, fixtureAddr);
    this.world.fixtures.put(fixture.addr, fixture);
    this.fixtures.add(fixture);
    return fixture;
}
private native long jniCreateFixture (long addr, long shapeAddr, float density); /*
b2Body* body = (b2Body*)addr;
b2Shape* shape = (b2Shape*)shapeAddr;
return (jlong)body->CreateFixture( shape, density );
*/
/** Destroy a fixture. This removes the fixture from the broad-phase and destroys all contacts associated with this fixture.
 * This will automatically adjust the mass of the body if the body is dynamic and the fixture has positive density. All
 * fixtures attached to a body are implicitly destroyed when the body is destroyed.
 * @param fixture the fixture to be removed.
 * @warning This function is locked during callbacks. */
public void destroyFixture (Fixture fixture) {
    jniDestroyFixture(addr, fixture.addr);
    // unregister the Java-side wrapper and return it to the pool for reuse
    this.world.fixtures.remove(fixture.addr);
    this.fixtures.removeValue(fixture, true);
    this.world.freeFixtures.free(fixture);
}
/** JNI bridge for b2Body::DestroyFixture. The trailing block comment is generator-consumed C++ source. */
private native void jniDestroyFixture (long addr, long fixtureAddr); /*
b2Body* body = (b2Body*)addr;
b2Fixture* fixture = (b2Fixture*)fixtureAddr;
body->DestroyFixture(fixture);
*/
/** Set the position of the body's origin and rotation. This breaks any contacts and wakes the other bodies. Manipulating a
 * body's transform may cause non-physical behavior.
 * @param position the world position of the body's local origin.
 * @param angle the world rotation in radians. */
public void setTransform (Vector2 position, float angle) {
jniSetTransform(addr, position.x, position.y, angle);
}
/** Set the position of the body's origin and rotation. This breaks any contacts and wakes the other bodies. Manipulating a
 * body's transform may cause non-physical behavior.
 * @param x the world position on the x-axis
 * @param y the world position on the y-axis
 * @param angle the world rotation in radians. */
public void setTransform (float x, float y, float angle) {
jniSetTransform(addr, x, y, angle);
}
/** JNI bridge for b2Body::SetTransform. The trailing block comment is generator-consumed C++ source. */
private native void jniSetTransform (long addr, float positionX, float positionY, float angle); /*
b2Body* body = (b2Body*)addr;
body->SetTransform(b2Vec2(positionX, positionY), angle);
*/
// Scratch instance reused by every getTransform() call; callers must copy it if the value is to be retained.
private final Transform transform = new Transform();
/** Get the body transform for the body's origin. The returned instance is reused on each call. */
public Transform getTransform () {
jniGetTransform(addr, transform.vals);
return transform;
}
/** JNI bridge for b2Body::GetTransform; writes position (x, y) and rotation (cos, sin) into {@code vals[0..3]}.
 * The trailing block comment is generator-consumed C++ source. */
private native void jniGetTransform (long addr, float[] vals); /*
b2Body* body = (b2Body*)addr;
b2Transform t = body->GetTransform();
vals[0] = t.p.x;
vals[1] = t.p.y;
vals[2] = t.q.c;
vals[3] = t.q.s;
*/
// Scratch vector reused by every getPosition() call; copy it if the value must be retained.
private final Vector2 position = new Vector2();
/** Get the world body origin position. The returned instance is reused on each call.
 * @return the world position of the body's origin. */
public Vector2 getPosition () {
jniGetPosition(addr, tmp);
position.x = tmp[0];
position.y = tmp[1];
return position;
}
/** JNI bridge for b2Body::GetPosition; writes (x, y) into {@code position[0..1]}. The trailing block comment is
 * generator-consumed C++ source. */
private native void jniGetPosition (long addr, float[] position); /*
b2Body* body = (b2Body*)addr;
b2Vec2 p = body->GetPosition();
position[0] = p.x;
position[1] = p.y;
*/
/** Get the angle in radians.
 * @return the current world rotation angle in radians. */
public float getAngle () {
return jniGetAngle(addr);
}
/** JNI bridge for b2Body::GetAngle. The trailing block comment is generator-consumed C++ source. */
private native float jniGetAngle (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetAngle();
*/
// Scratch vector reused by every getWorldCenter() call; copy it if the value must be retained.
private final Vector2 worldCenter = new Vector2();
/** Get the world position of the center of mass. The returned instance is reused on each call. */
public Vector2 getWorldCenter () {
jniGetWorldCenter(addr, tmp);
worldCenter.x = tmp[0];
worldCenter.y = tmp[1];
return worldCenter;
}
/** JNI bridge for b2Body::GetWorldCenter. The trailing block comment is generator-consumed C++ source. */
private native void jniGetWorldCenter (long addr, float[] worldCenter); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetWorldCenter();
worldCenter[0] = w.x;
worldCenter[1] = w.y;
*/
// Scratch vector reused by every getLocalCenter() call; copy it if the value must be retained.
private final Vector2 localCenter = new Vector2();
/** Get the local position of the center of mass. The returned instance is reused on each call. */
public Vector2 getLocalCenter () {
jniGetLocalCenter(addr, tmp);
localCenter.x = tmp[0];
localCenter.y = tmp[1];
return localCenter;
}
/** JNI bridge for b2Body::GetLocalCenter. The trailing block comment is generator-consumed C++ source. */
private native void jniGetLocalCenter (long addr, float[] localCenter); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetLocalCenter();
localCenter[0] = w.x;
localCenter[1] = w.y;
*/
/** Set the linear velocity of the center of mass.
 * @param v the new linear velocity. */
public void setLinearVelocity (Vector2 v) {
jniSetLinearVelocity(addr, v.x, v.y);
}
/** Set the linear velocity of the center of mass.
 * @param vX the x component of the new linear velocity.
 * @param vY the y component of the new linear velocity. */
public void setLinearVelocity (float vX, float vY) {
jniSetLinearVelocity(addr, vX, vY);
}
/** JNI bridge for b2Body::SetLinearVelocity. The trailing block comment is generator-consumed C++ source. */
private native void jniSetLinearVelocity (long addr, float x, float y); /*
b2Body* body = (b2Body*)addr;
body->SetLinearVelocity(b2Vec2(x, y));
*/
// Scratch vector reused by every getLinearVelocity() call; copy it if the value must be retained.
private final Vector2 linearVelocity = new Vector2();
/** Get the linear velocity of the center of mass. The returned instance is reused on each call. */
public Vector2 getLinearVelocity () {
jniGetLinearVelocity(addr, tmp);
linearVelocity.x = tmp[0];
linearVelocity.y = tmp[1];
return linearVelocity;
}
/** JNI bridge for b2Body::GetLinearVelocity. The trailing block comment is generator-consumed C++ source. */
private native void jniGetLinearVelocity (long addr, float[] linearVelocity); /*
b2Body* body = (b2Body*)addr;
b2Vec2 l = body->GetLinearVelocity();
linearVelocity[0] = l.x;
linearVelocity[1] = l.y;
*/
/** Set the angular velocity.
 * @param omega the new angular velocity in radians/second. */
public void setAngularVelocity (float omega) {
jniSetAngularVelocity(addr, omega);
}
/** JNI bridge for b2Body::SetAngularVelocity. The trailing block comment is generator-consumed C++ source. */
private native void jniSetAngularVelocity (long addr, float omega); /*
b2Body* body = (b2Body*)addr;
body->SetAngularVelocity(omega);
*/
/** Get the angular velocity in radians/second. */
public float getAngularVelocity () {
return jniGetAngularVelocity(addr);
}
/** JNI bridge for b2Body::GetAngularVelocity. The trailing block comment is generator-consumed C++ source. */
private native float jniGetAngularVelocity (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetAngularVelocity();
*/
/** Apply a force at a world point. If the force is not applied at the center of mass, it will generate a torque and affect the
 * angular velocity.
 * @param force the world force vector, usually in Newtons (N).
 * @param point the world position of the point of application.
 * @param wake whether to wake up the body. */
public void applyForce (Vector2 force, Vector2 point, boolean wake) {
jniApplyForce(addr, force.x, force.y, point.x, point.y, wake);
}
/** Apply a force at a world point. If the force is not applied at the center of mass, it will generate a torque and affect the
 * angular velocity.
 * @param forceX the world force vector on x, usually in Newtons (N).
 * @param forceY the world force vector on y, usually in Newtons (N).
 * @param pointX the world position of the point of application on x.
 * @param pointY the world position of the point of application on y.
 * @param wake whether to wake up the body. */
public void applyForce (float forceX, float forceY, float pointX, float pointY, boolean wake) {
jniApplyForce(addr, forceX, forceY, pointX, pointY, wake);
}
/** JNI bridge for b2Body::ApplyForce. The trailing block comment is generator-consumed C++ source. */
private native void jniApplyForce (long addr, float forceX, float forceY, float pointX, float pointY, boolean wake); /*
b2Body* body = (b2Body*)addr;
body->ApplyForce(b2Vec2(forceX, forceY), b2Vec2(pointX, pointY), wake);
*/
/** Apply a force to the center of mass.
 * @param force the world force vector, usually in Newtons (N).
 * @param wake whether to wake up the body. */
public void applyForceToCenter (Vector2 force, boolean wake) {
jniApplyForceToCenter(addr, force.x, force.y, wake);
}
/** Apply a force to the center of mass.
 * @param forceX the world force vector on x, usually in Newtons (N).
 * @param forceY the world force vector on y, usually in Newtons (N).
 * @param wake whether to wake up the body. */
public void applyForceToCenter (float forceX, float forceY, boolean wake) {
jniApplyForceToCenter(addr, forceX, forceY, wake);
}
/** JNI bridge for b2Body::ApplyForceToCenter. The trailing block comment is generator-consumed C++ source. */
private native void jniApplyForceToCenter (long addr, float forceX, float forceY, boolean wake); /*
b2Body* body = (b2Body*)addr;
body->ApplyForceToCenter(b2Vec2(forceX, forceY), wake);
*/
/** Apply a torque. This affects the angular velocity without affecting the linear velocity of the center of mass.
 * @param torque about the z-axis (out of the screen), usually in N-m.
 * @param wake whether to wake up the body. */
public void applyTorque (float torque, boolean wake) {
jniApplyTorque(addr, torque, wake);
}
/** JNI bridge for b2Body::ApplyTorque. The trailing block comment is generator-consumed C++ source. */
private native void jniApplyTorque (long addr, float torque, boolean wake); /*
b2Body* body = (b2Body*)addr;
body->ApplyTorque(torque, wake);
*/
/** Apply an impulse at a point. This immediately modifies the velocity. It also modifies the angular velocity if the point of
 * application is not at the center of mass.
 * @param impulse the world impulse vector, usually in N-seconds or kg-m/s.
 * @param point the world position of the point of application.
 * @param wake whether to wake up the body. */
public void applyLinearImpulse (Vector2 impulse, Vector2 point, boolean wake) {
jniApplyLinearImpulse(addr, impulse.x, impulse.y, point.x, point.y, wake);
}
/** Apply an impulse at a point. This immediately modifies the velocity. It also modifies the angular velocity if the point of
 * application is not at the center of mass.
 * @param impulseX the world impulse vector on the x-axis, usually in N-seconds or kg-m/s.
 * @param impulseY the world impulse vector on the y-axis, usually in N-seconds or kg-m/s.
 * @param pointX the world position of the point of application on the x-axis.
 * @param pointY the world position of the point of application on the y-axis.
 * @param wake whether to wake up the body. */
public void applyLinearImpulse (float impulseX, float impulseY, float pointX, float pointY, boolean wake) {
jniApplyLinearImpulse(addr, impulseX, impulseY, pointX, pointY, wake);
}
/** JNI bridge for b2Body::ApplyLinearImpulse. The trailing block comment is generator-consumed C++ source. */
private native void jniApplyLinearImpulse (long addr, float impulseX, float impulseY, float pointX, float pointY, boolean wake); /*
b2Body* body = (b2Body*)addr;
body->ApplyLinearImpulse( b2Vec2( impulseX, impulseY ), b2Vec2( pointX, pointY ), wake);
*/
/** Apply an angular impulse.
 * @param impulse the angular impulse in units of kg*m*m/s
 * @param wake whether to wake up the body. */
public void applyAngularImpulse (float impulse, boolean wake) {
jniApplyAngularImpulse(addr, impulse, wake);
}
/** JNI bridge for b2Body::ApplyAngularImpulse. The trailing block comment is generator-consumed C++ source. */
private native void jniApplyAngularImpulse (long addr, float impulse, boolean wake); /*
b2Body* body = (b2Body*)addr;
body->ApplyAngularImpulse(impulse, wake);
*/
/** Get the total mass of the body.
 * @return the mass, usually in kilograms (kg). */
public float getMass () {
return jniGetMass(addr);
}
/** JNI bridge for b2Body::GetMass. The trailing block comment is generator-consumed C++ source. */
private native float jniGetMass (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetMass();
*/
/** Get the rotational inertia of the body about the local origin.
 * @return the rotational inertia, usually in kg-m^2. */
public float getInertia () {
return jniGetInertia(addr);
}
/** JNI bridge for b2Body::GetInertia. The trailing block comment is generator-consumed C++ source. */
private native float jniGetInertia (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetInertia();
*/
// Scratch instance reused by every getMassData() call; copy it if the value must be retained.
private final MassData massData = new MassData();
/** Get the mass data of the body. The returned instance is reused on each call.
 * @return a struct containing the mass, inertia and center of the body. */
public MassData getMassData () {
jniGetMassData(addr, tmp);
massData.mass = tmp[0];
massData.center.x = tmp[1];
massData.center.y = tmp[2];
massData.I = tmp[3];
return massData;
}
/** JNI bridge for b2Body::GetMassData; writes mass, center (x, y) and inertia into {@code massData[0..3]}.
 * The trailing block comment is generator-consumed C++ source. */
private native void jniGetMassData (long addr, float[] massData); /*
b2Body* body = (b2Body*)addr;
b2MassData m;
body->GetMassData(&m);
massData[0] = m.mass;
massData[1] = m.center.x;
massData[2] = m.center.y;
massData[3] = m.I;
*/
/** Set the mass properties to override the mass properties of the fixtures. Note that this changes the center of mass position.
 * Note that creating or destroying fixtures can also alter the mass. This function has no effect if the body isn't dynamic.
 * @param data the mass properties. */
public void setMassData (MassData data) {
jniSetMassData(addr, data.mass, data.center.x, data.center.y, data.I);
}
/** JNI bridge for b2Body::SetMassData. The trailing block comment is generator-consumed C++ source. */
private native void jniSetMassData (long addr, float mass, float centerX, float centerY, float I); /*
b2Body* body = (b2Body*)addr;
b2MassData m;
m.mass = mass;
m.center.x = centerX;
m.center.y = centerY;
m.I = I;
body->SetMassData(&m);
*/
/** This resets the mass properties to the sum of the mass properties of the fixtures. This normally does not need to be called
 * unless you called SetMassData to override the mass and you later want to reset the mass. */
public void resetMassData () {
jniResetMassData(addr);
}
/** JNI bridge for b2Body::ResetMassData. The trailing block comment is generator-consumed C++ source. */
private native void jniResetMassData (long addr); /*
b2Body* body = (b2Body*)addr;
body->ResetMassData();
*/
// Scratch vector reused by every getWorldPoint() call; copy it if the value must be retained.
private final Vector2 localPoint = new Vector2();
/** Get the world coordinates of a point given the local coordinates. The returned instance is reused on each call.
 * @param localPoint a point on the body measured relative the the body's origin.
 * @return the same point expressed in world coordinates. */
public Vector2 getWorldPoint (Vector2 localPoint) {
jniGetWorldPoint(addr, localPoint.x, localPoint.y, tmp);
this.localPoint.x = tmp[0];
this.localPoint.y = tmp[1];
return this.localPoint;
}
/** JNI bridge for b2Body::GetWorldPoint. The trailing block comment is generator-consumed C++ source. */
private native void jniGetWorldPoint (long addr, float localPointX, float localPointY, float[] worldPoint); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetWorldPoint( b2Vec2( localPointX, localPointY ) );
worldPoint[0] = w.x;
worldPoint[1] = w.y;
*/
// Scratch vector reused by every getWorldVector() call; copy it if the value must be retained.
private final Vector2 worldVector = new Vector2();
/** Get the world coordinates of a vector given the local coordinates. The returned instance is reused on each call.
 * @param localVector a vector fixed in the body.
 * @return the same vector expressed in world coordinates. */
public Vector2 getWorldVector (Vector2 localVector) {
jniGetWorldVector(addr, localVector.x, localVector.y, tmp);
worldVector.x = tmp[0];
worldVector.y = tmp[1];
return worldVector;
}
/** JNI bridge for b2Body::GetWorldVector. The trailing block comment is generator-consumed C++ source. */
private native void jniGetWorldVector (long addr, float localVectorX, float localVectorY, float[] worldVector); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetWorldVector( b2Vec2( localVectorX, localVectorY ) );
worldVector[0] = w.x;
worldVector[1] = w.y;
*/
// Scratch vector reused by every getLocalPoint() call; copy it if the value must be retained.
public final Vector2 localPoint2 = new Vector2();
/** Gets a local point relative to the body's origin given a world point. The returned instance is reused on each call.
 * @param worldPoint a point in world coordinates.
 * @return the corresponding local point relative to the body's origin. */
public Vector2 getLocalPoint (Vector2 worldPoint) {
jniGetLocalPoint(addr, worldPoint.x, worldPoint.y, tmp);
localPoint2.x = tmp[0];
localPoint2.y = tmp[1];
return localPoint2;
}
/** JNI bridge for b2Body::GetLocalPoint. The trailing block comment is generator-consumed C++ source. */
private native void jniGetLocalPoint (long addr, float worldPointX, float worldPointY, float[] localPoint); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetLocalPoint( b2Vec2( worldPointX, worldPointY ) );
localPoint[0] = w.x;
localPoint[1] = w.y;
*/
// Scratch vector reused by every getLocalVector() call; copy it if the value must be retained.
public final Vector2 localVector = new Vector2();
/** Gets a local vector given a world vector. The returned instance is reused on each call.
 * @param worldVector a vector in world coordinates.
 * @return the corresponding local vector. */
public Vector2 getLocalVector (Vector2 worldVector) {
jniGetLocalVector(addr, worldVector.x, worldVector.y, tmp);
localVector.x = tmp[0];
localVector.y = tmp[1];
return localVector;
}
/** JNI bridge for b2Body::GetLocalVector. The trailing block comment is generator-consumed C++ source. */
private native void jniGetLocalVector (long addr, float worldVectorX, float worldVectorY, float[] worldVector); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetLocalVector( b2Vec2( worldVectorX, worldVectorY ) );
worldVector[0] = w.x;
worldVector[1] = w.y;
*/
// Scratch vector reused by every getLinearVelocityFromWorldPoint() call; copy it if the value must be retained.
public final Vector2 linVelWorld = new Vector2();
/** Get the world linear velocity of a world point attached to this body. The returned instance is reused on each call.
 * @param worldPoint a point in world coordinates.
 * @return the world velocity of a point. */
public Vector2 getLinearVelocityFromWorldPoint (Vector2 worldPoint) {
jniGetLinearVelocityFromWorldPoint(addr, worldPoint.x, worldPoint.y, tmp);
linVelWorld.x = tmp[0];
linVelWorld.y = tmp[1];
return linVelWorld;
}
/** JNI bridge for b2Body::GetLinearVelocityFromWorldPoint. The trailing block comment is generator-consumed C++ source. */
private native void jniGetLinearVelocityFromWorldPoint (long addr, float worldPointX, float worldPointY, float[] linVelWorld); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetLinearVelocityFromWorldPoint( b2Vec2( worldPointX, worldPointY ) );
linVelWorld[0] = w.x;
linVelWorld[1] = w.y;
*/
// Scratch vector reused by every getLinearVelocityFromLocalPoint() call; copy it if the value must be retained.
public final Vector2 linVelLoc = new Vector2();
/** Get the world velocity of a local point. The returned instance is reused on each call.
 * @param localPoint a point in local coordinates.
 * @return the world velocity of a point. */
public Vector2 getLinearVelocityFromLocalPoint (Vector2 localPoint) {
jniGetLinearVelocityFromLocalPoint(addr, localPoint.x, localPoint.y, tmp);
linVelLoc.x = tmp[0];
linVelLoc.y = tmp[1];
return linVelLoc;
}
/** JNI bridge for b2Body::GetLinearVelocityFromLocalPoint. The trailing block comment is generator-consumed C++ source. */
private native void jniGetLinearVelocityFromLocalPoint (long addr, float localPointX, float localPointY, float[] linVelLoc); /*
b2Body* body = (b2Body*)addr;
b2Vec2 w = body->GetLinearVelocityFromLocalPoint( b2Vec2( localPointX, localPointY ) );
linVelLoc[0] = w.x;
linVelLoc[1] = w.y;
*/
/** Get the linear damping of the body. */
public float getLinearDamping () {
return jniGetLinearDamping(addr);
}
/** JNI bridge for b2Body::GetLinearDamping. The trailing block comment is generator-consumed C++ source. */
private native float jniGetLinearDamping (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetLinearDamping();
*/
/** Set the linear damping of the body. */
public void setLinearDamping (float linearDamping) {
jniSetLinearDamping(addr, linearDamping);
}
/** JNI bridge for b2Body::SetLinearDamping. The trailing block comment is generator-consumed C++ source. */
private native void jniSetLinearDamping (long addr, float linearDamping); /*
b2Body* body = (b2Body*)addr;
body->SetLinearDamping(linearDamping);
*/
/** Get the angular damping of the body. */
public float getAngularDamping () {
return jniGetAngularDamping(addr);
}
/** JNI bridge for b2Body::GetAngularDamping. The trailing block comment is generator-consumed C++ source. */
private native float jniGetAngularDamping (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetAngularDamping();
*/
/** Set the angular damping of the body. */
public void setAngularDamping (float angularDamping) {
jniSetAngularDamping(addr, angularDamping);
}
/** JNI bridge for b2Body::SetAngularDamping. The trailing block comment is generator-consumed C++ source. */
private native void jniSetAngularDamping (long addr, float angularDamping); /*
b2Body* body = (b2Body*)addr;
body->SetAngularDamping(angularDamping);
*/
/** Set the type of this body. This may alter the mass and velocity. */
public void setType (BodyType type) {
jniSetType(addr, type.getValue());
}
// The @off directive and the /*JNI ... */ block below are consumed by the native-code generator: the C++ helper
// getBodyType maps the int ordinal (0/1/2) to b2BodyType, defaulting to b2_staticBody. Do not edit as documentation.
// @off
/*JNI
inline b2BodyType getBodyType( int type )
{
switch( type )
{
case 0: return b2_staticBody;
case 1: return b2_kinematicBody;
case 2: return b2_dynamicBody;
default:
return b2_staticBody;
}
}
*/
/** JNI bridge for b2Body::SetType. The trailing block comment is generator-consumed C++ source. */
private native void jniSetType (long addr, int type); /*
b2Body* body = (b2Body*)addr;
body->SetType(getBodyType(type));
*/
/** Get the type of this body.
 * @return the {@link BodyType} mapped from the native b2BodyType ordinal; unknown values fall back to
 *         {@code BodyType.StaticBody}, matching the native default. */
public BodyType getType () {
	final int nativeType = jniGetType(addr);
	switch (nativeType) {
	case 1:
		return BodyType.KinematicBody;
	case 2:
		return BodyType.DynamicBody;
	default:
		return BodyType.StaticBody;
	}
}
/** JNI bridge for b2Body::GetType; returns the b2BodyType ordinal. The trailing block comment is generator-consumed
 * C++ source. */
private native int jniGetType (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetType();
*/
/** Should this body be treated like a bullet for continuous collision detection? */
public void setBullet (boolean flag) {
jniSetBullet(addr, flag);
}
/** JNI bridge for b2Body::SetBullet. The trailing block comment is generator-consumed C++ source. */
private native void jniSetBullet (long addr, boolean flag); /*
b2Body* body = (b2Body*)addr;
body->SetBullet(flag);
*/
/** Is this body treated like a bullet for continuous collision detection? */
public boolean isBullet () {
return jniIsBullet(addr);
}
/** JNI bridge for b2Body::IsBullet. The trailing block comment is generator-consumed C++ source. */
private native boolean jniIsBullet (long addr); /*
b2Body* body = (b2Body*)addr;
return body->IsBullet();
*/
/** You can disable sleeping on this body. If you disable sleeping, the body will never fall asleep (and is woken if
 * currently sleeping). */
public void setSleepingAllowed (boolean flag) {
jniSetSleepingAllowed(addr, flag);
}
/** JNI bridge for b2Body::SetSleepingAllowed. The trailing block comment is generator-consumed C++ source. */
private native void jniSetSleepingAllowed (long addr, boolean flag); /*
b2Body* body = (b2Body*)addr;
body->SetSleepingAllowed(flag);
*/
/** Is this body allowed to sleep */
public boolean isSleepingAllowed () {
return jniIsSleepingAllowed(addr);
}
/** JNI bridge for b2Body::IsSleepingAllowed. The trailing block comment is generator-consumed C++ source. */
private native boolean jniIsSleepingAllowed (long addr); /*
b2Body* body = (b2Body*)addr;
return body->IsSleepingAllowed();
*/
/** Set the sleep state of the body. A sleeping body has very low CPU cost.
 * @param flag set to true to put body to sleep, false to wake it. */
public void setAwake (boolean flag) {
jniSetAwake(addr, flag);
}
/** JNI bridge for b2Body::SetAwake. The trailing block comment is generator-consumed C++ source. */
private native void jniSetAwake (long addr, boolean flag); /*
b2Body* body = (b2Body*)addr;
body->SetAwake(flag);
*/
/** Get the sleeping state of this body.
 * @return true if the body is sleeping. */
public boolean isAwake () {
return jniIsAwake(addr);
}
/** JNI bridge for b2Body::IsAwake. The trailing block comment is generator-consumed C++ source. */
private native boolean jniIsAwake (long addr); /*
b2Body* body = (b2Body*)addr;
return body->IsAwake();
*/
/** Set the active state of the body. An inactive body is not simulated and cannot be collided with or woken up. If you pass a
 * flag of true, all fixtures will be added to the broad-phase. If you pass a flag of false, all fixtures will be removed from
 * the broad-phase and all contacts will be destroyed. Fixtures and joints are otherwise unaffected. You may continue to
 * create/destroy fixtures and joints on inactive bodies. Fixtures on an inactive body are implicitly inactive and will not
 * participate in collisions, ray-casts, or queries. Joints connected to an inactive body are implicitly inactive. An inactive
 * body is still owned by a b2World object and remains in the body list. */
public void setActive (boolean flag) {
jniSetActive(addr, flag);
}
/** JNI bridge for b2Body::SetActive. The trailing block comment is generator-consumed C++ source. */
private native void jniSetActive (long addr, boolean flag); /*
b2Body* body = (b2Body*)addr;
body->SetActive(flag);
*/
/** Get the active state of the body. */
public boolean isActive () {
return jniIsActive(addr);
}
/** JNI bridge for b2Body::IsActive. The trailing block comment is generator-consumed C++ source. */
private native boolean jniIsActive (long addr); /*
b2Body* body = (b2Body*)addr;
return body->IsActive();
*/
/** Set this body to have fixed rotation. This causes the mass to be reset. */
public void setFixedRotation (boolean flag) {
jniSetFixedRotation(addr, flag);
}
/** JNI bridge for b2Body::SetFixedRotation. The trailing block comment is generator-consumed C++ source. */
private native void jniSetFixedRotation (long addr, boolean flag); /*
b2Body* body = (b2Body*)addr;
body->SetFixedRotation(flag);
*/
/** Does this body have fixed rotation? */
public boolean isFixedRotation () {
return jniIsFixedRotation(addr);
}
/** JNI bridge for b2Body::IsFixedRotation. The trailing block comment is generator-consumed C++ source. */
private native boolean jniIsFixedRotation (long addr); /*
b2Body* body = (b2Body*)addr;
return body->IsFixedRotation();
*/
/** Get the list of all fixtures attached to this body. Do not modify the list! Returns this body's live internal array. */
public Array<Fixture> getFixtureList () {
return fixtures;
}
/** Get the list of all joints attached to this body. Do not modify the list! Returns this body's live internal array. */
public Array<JointEdge> getJointList () {
return joints;
}
/** Get the list of all contacts attached to this body.
 * @warning this list changes during the time step and you may miss some collisions if you don't use b2ContactListener. Do not
 * modify the returned list! */
// Intentionally not exposed; kept for reference with the native API.
// Array<ContactEdge> getContactList()
// {
// return contacts;
// }
/** @return Get the gravity scale of the body. */
public float getGravityScale () {
return jniGetGravityScale(addr);
}
/** JNI bridge for b2Body::GetGravityScale. The trailing block comment is generator-consumed C++ source. */
private native float jniGetGravityScale (long addr); /*
b2Body* body = (b2Body*)addr;
return body->GetGravityScale();
*/
/** Sets the gravity scale of the body */
public void setGravityScale (float scale) {
jniSetGravityScale(addr, scale);
}
/** JNI bridge for b2Body::SetGravityScale. The trailing block comment is generator-consumed C++ source. */
private native void jniSetGravityScale (long addr, float scale); /*
b2Body* body = (b2Body*)addr;
body->SetGravityScale(scale);
*/
/** Get the parent world of this body. */
public World getWorld () {
return world;
}
/** Get the user data previously attached with {@link #setUserData(Object)}; may be null. */
public Object getUserData () {
return userData;
}
/** Set the user data. An arbitrary application object associated with this body; not used by the physics engine. */
public void setUserData (Object userData) {
this.userData = userData;
}
}
|
|
/*L
* Copyright Washington University in St. Louis
* Copyright SemanticBits
* Copyright Persistent Systems
* Copyright Krishagni
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/catissue-core/LICENSE.txt for details.
*/
/**
*
*/
package edu.wustl.catissuecore.util;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import edu.wustl.catissuecore.TaskTimeCalculater;
import edu.wustl.catissuecore.bizlogic.CollectionProtocolBizLogic;
import edu.wustl.catissuecore.domain.CollectionProtocol;
import edu.wustl.catissuecore.domain.Site;
import edu.wustl.catissuecore.domain.User;
import edu.wustl.catissuecore.multiRepository.bean.SiteUserRolePrivilegeBean;
import edu.wustl.catissuecore.util.global.AppUtility;
import edu.wustl.catissuecore.util.global.Constants;
import edu.wustl.common.beans.NameValueBean;
import edu.wustl.common.domain.AbstractDomainObject;
import edu.wustl.common.exception.ApplicationException;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.dao.DAO;
import edu.wustl.dao.QueryWhereClause;
import edu.wustl.dao.condition.EqualClause;
import edu.wustl.dao.exception.DAOException;
import edu.wustl.security.beans.SecurityDataBean;
import edu.wustl.security.exception.SMException;
import edu.wustl.security.locator.CSMGroupLocator;
import edu.wustl.security.manager.SecurityManagerFactory;
import edu.wustl.security.privilege.PrivilegeManager;
import gov.nih.nci.security.exceptions.CSException;
/**
* @author supriya_dankh
*
*/
public class CollectionProtocolAuthorization implements edu.wustl.catissuecore.util.Roles
{
/**
 * Class logger (common logger keyed to this class).
 */
private final transient Logger logger =
Logger.getCommonLogger(CollectionProtocolAuthorization.class) ;
/**
 * Inserts CSM authorization data (user-group/role/protection-group mappings and dynamic
 * groups) for the given collection protocol, timing the operation via TaskTimeCalculater.
 * @param collectionProtocol the protocol whose privileges are being set up
 * @param protectionObjects protection objects to register with CSM
 * @param rowIdMap per-row site/user/role/privilege selections from the CP page; may be null
 * @throws ApplicationException if the underlying CSM call fails
 */
public void authenticate(CollectionProtocol collectionProtocol, HashSet protectionObjects,
		Map<String, SiteUserRolePrivilegeBean> rowIdMap) throws ApplicationException
{
	// Fixed typo in the timing-task label ("Authenticatge" -> "Authenticate").
	final TaskTimeCalculater cpAuth = TaskTimeCalculater.startTask("CP insert Authenticate",
			CollectionProtocolBizLogic.class);
	try
	{
		final PrivilegeManager privilegeManager = PrivilegeManager.getInstance();
		privilegeManager.insertAuthorizationData(
				getAuthorizationData(collectionProtocol, rowIdMap), protectionObjects,
				getDynamicGroups(collectionProtocol), collectionProtocol.getObjectId());
	}
	catch (SMException e)
	{
		// Log with the original exception attached, then translate to an ApplicationException.
		this.logger.error(e.getMessage(), e);
		throw AppUtility.handleSMException(e);
	}
	finally
	{
		TaskTimeCalculater.endTask(cpAuth);
	}
}
/**
 * Builds the CSM authorization beans for a collection protocol: PI privileges first, then
 * coordinator privileges, then (only when page rows are supplied) per-user row privileges.
 * The beans specify user-group / protection-group linkage through a role.
 * @param obj the CollectionProtocol being secured
 * @param rowIdMap per-row selections from the CP page; may be null
 * @return the accumulated SecurityDataBean entries
 * @throws ApplicationException if any privilege insertion fails
 */
protected Vector<SecurityDataBean> getAuthorizationData(AbstractDomainObject obj,
		Map<String, SiteUserRolePrivilegeBean> rowIdMap) throws ApplicationException
{
	final CollectionProtocol protocol = (CollectionProtocol) obj;
	final Vector<SecurityDataBean> securityBeans = new Vector<SecurityDataBean>();
	inserPIPrivileges(protocol, securityBeans);
	insertCoordinatorPrivileges(protocol, securityBeans);
	if (rowIdMap != null)
	{
		insertCpUserPrivilegs(protocol, securityBeans, rowIdMap);
	}
	return securityBeans;
}
/**
 * Applies the per-row site/user/role/privilege selections from the CP privileges page.
 * Rows flagged deleted get their privileges revoked; edited rows get a role rebuilt and a
 * user-group/role/protection-group mapping queued into {@code authorizationData}. Finally
 * the protocol's site collection is rebuilt from the rows and surviving users are merged
 * into its assigned-user collection.
 * @param collectionProtocol the protocol being saved
 * @param authorizationData accumulator for CSM security beans
 * @param rowIdMap page rows keyed by row id
 * @throws ApplicationException if privilege processing fails
 */
public void insertCpUserPrivilegs(CollectionProtocol collectionProtocol,
Vector<SecurityDataBean> authorizationData, Map<String,SiteUserRolePrivilegeBean> rowIdMap) throws ApplicationException
{
// int noOfUsers = rowIdMap.size();
Set<Site> siteCollection = new HashSet<Site>();
Set<User> userCollection = new HashSet<User>();
// siteIds mirrors siteCollection, de-duplicating sites by id.
Set<Long> siteIds = new HashSet<Long>();
for (Iterator<String> mapItr = rowIdMap.keySet().iterator(); mapItr.hasNext(); )
{
String key = mapItr.next();
SiteUserRolePrivilegeBean siteUserRolePrivilegeBean = rowIdMap.get(key);
User user = siteUserRolePrivilegeBean.getUser();
// Rows without the customization checkbox only contribute their first site, then are skipped.
if(!siteUserRolePrivilegeBean.isCustChecked())
{
List<Site> siteList = siteUserRolePrivilegeBean.getSiteList();
if(siteList != null && !siteList.isEmpty())
{
if(!siteIds.contains(siteList.get(0).getId()))
{
siteCollection.add(siteList.get(0));
siteIds.add(siteList.get(0).getId());
}
}
continue;
}
if(siteUserRolePrivilegeBean.isRowDeleted())
{
// Deleted row: revoke this user's privileges on the CP page.
AppUtility.processDeletedPrivilegesOnCPPage(siteUserRolePrivilegeBean, collectionProtocol.getId());
}
else if(siteUserRolePrivilegeBean.isRowEdited())
{
// Edited row: rebuild role/privileges and queue the CSM mapping.
updateAthurizationData(collectionProtocol,
authorizationData, siteUserRolePrivilegeBean, user);
}
// Non-deleted rows keep their user and sites on the protocol.
if(!siteUserRolePrivilegeBean.isRowDeleted())
{
userCollection.add(user);
List<Site> siteList = siteUserRolePrivilegeBean.getSiteList();
addSiteIds(siteCollection, siteIds, siteList);
}
}
// Replace the protocol's site collection with the set derived from the rows.
if(collectionProtocol.getSiteCollection()==null)
{
collectionProtocol.setSiteCollection(new HashSet<Site>());
}
collectionProtocol.getSiteCollection().clear();
collectionProtocol.getSiteCollection().addAll(siteCollection);
addUsers(collectionProtocol, userCollection);
}
/**
 * Rebuilds the role and privilege assignment for a single edited row of the CP privileges
 * page and appends the corresponding user-group/role/protection-group mapping bean to
 * {@code authorizationData}. (Method name typo "Athurization" kept for compatibility with
 * existing callers.)
 * @param collectionProtocol the protocol being edited
 * @param authorizationData accumulator the new SecurityDataBean is appended to
 * @param siteUserRolePrivilegeBean the edited row carrying role and privilege selections
 * @param user the user the row applies to
 * @throws ApplicationException if role creation or CSM lookups fail
 */
private void updateAthurizationData(
		CollectionProtocol collectionProtocol,
		Vector<SecurityDataBean> authorizationData,
		SiteUserRolePrivilegeBean siteUserRolePrivilegeBean, User user)
		throws ApplicationException
{
	final String defaultRole = siteUserRolePrivilegeBean.getRole().getValue();
	final String roleName = setRoleNames(collectionProtocol,
			siteUserRolePrivilegeBean, user, defaultRole);

	// Collect the privilege names selected for this row and (re-)create the role with them.
	final Set<String> privileges = new HashSet<String>();
	final List<NameValueBean> privilegeList = siteUserRolePrivilegeBean.getPrivileges();
	for (NameValueBean privilege : privilegeList)
	{
		privileges.add(privilege.getValue());
	}
	AppUtility.processRole(roleName);
	PrivilegeManager.getInstance().createRole(roleName, privileges);

	final String userId = String.valueOf(user.getCsmUserId());
	final gov.nih.nci.security.authorization.domainobjects.User csmUser = getUserByID(userId);
	final HashSet<gov.nih.nci.security.authorization.domainobjects.User> group =
			new HashSet<gov.nih.nci.security.authorization.domainobjects.User>();
	group.add(csmUser);

	// Redundant new String(...) copy removed - getPGName already returns a String.
	final String protectionGroupName = CSMGroupLocator.getInstance()
			.getPGName(collectionProtocol.getId(), CollectionProtocol.class);
	final SecurityDataBean userGrpRoleProtectionGrpBean = new SecurityDataBean();
	userGrpRoleProtectionGrpBean.setUser("");
	userGrpRoleProtectionGrpBean.setRoleName(roleName);
	userGrpRoleProtectionGrpBean.setGroupName(
			Constants.getCPUserGroupName(collectionProtocol.getId(), user.getCsmUserId()));
	userGrpRoleProtectionGrpBean.setProtGrpName(protectionGroupName);
	userGrpRoleProtectionGrpBean.setGroup(group);
	authorizationData.add(userGrpRoleProtectionGrpBean);
}
/**
 * Resolves the role name to use for an edited row: a synthetic per-CP role name when the
 * selected default role is "-1", "0" or the non-admin marker, otherwise the name of the
 * role selected on the row.
 * @param collectionProtocol protocol the synthetic role name is scoped to
 * @param siteUserRolePrivilegeBean row carrying the selected role
 * @param user user the role applies to
 * @param defaultRole selected role value (may be null)
 * @return the resolved role name
 */
private String setRoleNames(CollectionProtocol collectionProtocol,
		SiteUserRolePrivilegeBean siteUserRolePrivilegeBean, User user,
		String defaultRole)
{
	final boolean useGeneratedName = defaultRole != null
			&& ("-1".equalsIgnoreCase(defaultRole)
					|| "0".equalsIgnoreCase(defaultRole)
					|| defaultRole.equalsIgnoreCase(Constants.NON_ADMIN_USER));
	if (useGeneratedName)
	{
		return Constants.getCPRoleName(collectionProtocol.getId(), user.getCsmUserId(), defaultRole);
	}
	return siteUserRolePrivilegeBean.getRole().getName();
}
/**
 * Adds each candidate user to the protocol's assigned-user collection unless a user with
 * the same id is already present (de-duplication is by id, not object identity).
 * @param collectionProtocol protocol whose assigned users are extended
 * @param userCollection candidate users to merge in
 */
public void addUsers(
		CollectionProtocol collectionProtocol, Set<User> userCollection)
{
	for (User user : userCollection)
	{
		boolean isPresent = false;
		for (User setUser : collectionProtocol.getAssignedProtocolUserCollection())
		{
			if (user.getId().equals(setUser.getId()))
			{
				isPresent = true;
				break; // duplicate id found - no need to scan the rest
			}
		}
		if (!isPresent)
		{
			collectionProtocol.getAssignedProtocolUserCollection().add(user);
		}
	}
}
/**
 * Merges each site of {@code siteList} into {@code siteCollection}, de-duplicating by site
 * id, and records the id of every newly added site in {@code siteIds} (the caller keeps
 * the two collections in lockstep).
 * @param siteCollection accumulated unique sites
 * @param siteIds ids of the sites already in {@code siteCollection}
 * @param siteList sites to merge in
 */
private void addSiteIds(Set<Site> siteCollection,
		Set<Long> siteIds, List<Site> siteList)
{
	for (Site site : siteList)
	{
		boolean isPresent = false;
		for (Site setSite : siteCollection)
		{
			if (setSite.getId().equals(site.getId()))
			{
				isPresent = true;
				break; // duplicate id found - no need to scan the rest
			}
		}
		if (!isPresent)
		{
			siteCollection.add(site);
			siteIds.add(site.getId());
		}
	}
}
/**
 * Grants the COORDINATOR role on this protocol's protection group to every coordinator of
 * the protocol. All coordinators are collected into a single CSM user group and one
 * SecurityDataBean describing the group/role/protection-group mapping is appended to
 * {@code authorizationData}.
 * NOTE(review): setUser(...) receives whichever coordinator id happened to be iterated
 * last (or "" when there are none) - preserved as-is from the original behavior.
 * @param collectionProtocol protocol whose coordinators are granted access
 * @param authorizationData accumulator for CSM security beans
 * @throws ApplicationException on DAO or CSM lookup failure
 */
public void insertCoordinatorPrivileges(CollectionProtocol collectionProtocol,
		Vector<SecurityDataBean> authorizationData) throws ApplicationException
{
	DAO dao = null;
	try
	{
		final Collection<User> coordinators = collectionProtocol.getCoordinatorCollection();
		final HashSet<gov.nih.nci.security.authorization.domainobjects.User> group =
				new HashSet<gov.nih.nci.security.authorization.domainobjects.User>();
		String userId = "";
		dao = AppUtility.openDAOSession(null);
		for (User aUser : coordinators)
		{
			// Re-fetch each coordinator so the persistent copy (with its CSM user id) is used.
			final User exactUser = (User) dao.retrieveById(User.class.getName(), aUser.getId());
			userId = String.valueOf(exactUser.getCsmUserId());
			group.add(getUserByID(userId));
		}
		// Redundant new String(...) copy removed - getPGName already returns a String.
		final String protectionGroupName = CSMGroupLocator.getInstance()
				.getPGName(collectionProtocol.getId(), CollectionProtocol.class);
		final SecurityDataBean userGroupRoleProtectionGroupBean = new SecurityDataBean();
		userGroupRoleProtectionGroupBean.setUser(userId);
		userGroupRoleProtectionGroupBean.setRoleName(COORDINATOR);
		userGroupRoleProtectionGroupBean.setGroupName(CSMGroupLocator.getInstance()
				.getCoordinatorGroupName(collectionProtocol.getId(), CollectionProtocol.class));
		userGroupRoleProtectionGroupBean.setProtGrpName(protectionGroupName);
		userGroupRoleProtectionGroupBean.setGroup(group);
		authorizationData.add(userGroupRoleProtectionGroupBean);
	}
	finally
	{
		AppUtility.closeDAOSession(dao);
	}
}
public void inserPIPrivileges(CollectionProtocol collectionProtocol,
Vector<SecurityDataBean> authorizationData) throws ApplicationException
{
HashSet<gov.nih.nci.security.authorization.domainobjects.User> group = new HashSet<gov.nih.nci.security.authorization.domainobjects.User>();
String userId = String
.valueOf(collectionProtocol.getPrincipalInvestigator().getCsmUserId());
gov.nih.nci.security.authorization.domainobjects.User user = getUserByID(userId);
group.add(user);
String protectionGroupName = new String(CSMGroupLocator.getInstance().
getPGName(collectionProtocol.getId(), CollectionProtocol.class));
SecurityDataBean userGroupRoleProtectionGroupBean = new SecurityDataBean();
userGroupRoleProtectionGroupBean.setUser(userId);
userGroupRoleProtectionGroupBean.setRoleName(PI);
userGroupRoleProtectionGroupBean.setGroupName(CSMGroupLocator.getInstance().
getPIGroupName(collectionProtocol.getId(), CollectionProtocol.class));
userGroupRoleProtectionGroupBean.setProtGrpName(protectionGroupName);
userGroupRoleProtectionGroupBean.setGroup(group);
authorizationData.add(userGroupRoleProtectionGroupBean);
}
    /**
     * Returns the dynamic CSM group names for the given domain object.
     * Currently a stub: this implementation always returns {@code null}
     * (no dynamic groups) regardless of the input.
     *
     * @param obj domain object (unused)
     * @return always {@code null}
     */
    private String[] getDynamicGroups(AbstractDomainObject obj)
    {
        return null;
    }
    /**
     * Looks up a CSM user by its string-form user id via the security manager.
     *
     * @param userId CSM user id as a decimal string
     * @return the CSM user object for that id
     * @throws SMException if the security-manager lookup fails
     */
    private gov.nih.nci.security.authorization.domainobjects.User getUserByID(String userId)
            throws SMException
    {
        return SecurityManagerFactory.getSecurityManager().getUserById(userId);
    }
//not required
/**
* @param collectionProtocol
* @return
* @throws DAOException
*/
public Long getCSMUserId(DAO dao, User user) throws DAOException
{
String[] selectColumnNames = {Constants.CSM_USER_ID};
//String[] whereColumnNames = {edu.wustl.common.util.global.Constants.SYSTEM_IDENTIFIER};
//String[] whereColumnCondition = {"="};
//Long[] whereColumnValues = {user.getId()};
QueryWhereClause queryWhereClause = new QueryWhereClause(User.class.getName());
queryWhereClause.addCondition(new EqualClause(edu.wustl.common.util.global.Constants.SYSTEM_IDENTIFIER,
user.getId()));
List csmUserIdList = dao.retrieve(User.class.getName(), selectColumnNames,
queryWhereClause);
if (!csmUserIdList.isEmpty())
{
Long csmUserId = (Long) csmUserIdList.get(0);
return csmUserId;
}
return null;
}
public boolean hasCoordinator(User coordinator, CollectionProtocol collectionProtocol)
{
boolean flag=false;
Collection<User> coordinatorCollection=collectionProtocol.getCoordinatorCollection();
if(coordinatorCollection!=null)
{
Iterator<User> iterator = coordinatorCollection.iterator();
while (iterator.hasNext())
{ User coordinatorOld = iterator.next();
if (coordinator.getId().equals(coordinatorOld.getId()))
{
flag= true;
break;
}
}
}
return flag;
}
    /**
     * Adds or removes the PI and all coordinators of the collection protocol
     * from their respective CSM user groups.
     *
     * @param dao active DAO, used only to resolve coordinator CSM ids on assignment
     * @param collectionProtocol protocol whose PI/coordinator groups are updated
     * @param operation when {@code true}, users are REMOVED from the groups;
     *        when {@code false}, they are assigned to them
     * @throws ApplicationException on CSM or DAO failure
     */
    public void updatePIAndCoordinatorGroup(DAO dao, CollectionProtocol collectionProtocol,
            boolean operation) throws ApplicationException
    {
        // PI group first.
        Long principalInvestigatorId = collectionProtocol.getPrincipalInvestigator().getCsmUserId();
        String userGroupName = CSMGroupLocator.getInstance().
                getPIGroupName(collectionProtocol.getId(), CollectionProtocol.class);
        if (operation)
        {
            SecurityManagerFactory.getSecurityManager().removeUserFromGroup(
                    userGroupName, principalInvestigatorId.toString());
        }
        else
        {
            SecurityManagerFactory.getSecurityManager().assignUserToGroup(
                    userGroupName, principalInvestigatorId.toString());
        }
        // Then the coordinator group, for every coordinator on the protocol.
        userGroupName = CSMGroupLocator.getInstance().
                getCoordinatorGroupName(collectionProtocol.getId(), CollectionProtocol.class);
        Collection<User> coordinatorColl=collectionProtocol.getCoordinatorCollection();
        if(coordinatorColl!=null)
        {
            Iterator<User> iterator = coordinatorColl.iterator();
            while (iterator.hasNext())
            {
                User user = iterator.next();
                if (operation)
                {
                    // Removal uses the CSM id cached on the domain object ...
                    SecurityManagerFactory.getSecurityManager().removeUserFromGroup(
                            userGroupName, user.getCsmUserId().toString());
                }
                else
                {
                    // ... while assignment re-reads it from the database and skips
                    // users without a CSM id. NOTE(review): confirm this asymmetry
                    // is intentional (stale cached ids vs. freshly persisted users).
                    Long csmUserId = getCSMUserId(dao, user);
                    if (csmUserId != null)
                    {
                        SecurityManagerFactory.getSecurityManager()
                                .assignUserToGroup(userGroupName, csmUserId.toString());
                    }
                }
            }
        }
    }
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.raptor.legacy;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimaps;
import io.airlift.json.JsonCodec;
import io.airlift.json.JsonCodecFactory;
import io.airlift.json.ObjectMapperProvider;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import io.trino.plugin.raptor.legacy.metadata.ColumnInfo;
import io.trino.plugin.raptor.legacy.metadata.Distribution;
import io.trino.plugin.raptor.legacy.metadata.MetadataDao;
import io.trino.plugin.raptor.legacy.metadata.ShardDelta;
import io.trino.plugin.raptor.legacy.metadata.ShardInfo;
import io.trino.plugin.raptor.legacy.metadata.ShardManager;
import io.trino.plugin.raptor.legacy.metadata.Table;
import io.trino.plugin.raptor.legacy.metadata.TableColumn;
import io.trino.plugin.raptor.legacy.metadata.ViewResult;
import io.trino.plugin.raptor.legacy.systemtables.ColumnRangesSystemTable;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ConnectorInsertTableHandle;
import io.trino.spi.connector.ConnectorMetadata;
import io.trino.spi.connector.ConnectorNewTableLayout;
import io.trino.spi.connector.ConnectorOutputMetadata;
import io.trino.spi.connector.ConnectorOutputTableHandle;
import io.trino.spi.connector.ConnectorPartitioningHandle;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorTableHandle;
import io.trino.spi.connector.ConnectorTableLayoutHandle;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.connector.ConnectorTablePartitioning;
import io.trino.spi.connector.ConnectorTableProperties;
import io.trino.spi.connector.ConnectorViewDefinition;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.ConstraintApplicationResult;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.SchemaTablePrefix;
import io.trino.spi.connector.SystemTable;
import io.trino.spi.connector.TableNotFoundException;
import io.trino.spi.connector.ViewNotFoundException;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.statistics.ComputedStatistics;
import io.trino.spi.type.Type;
import org.skife.jdbi.v2.IDBI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.LongConsumer;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.MoreCollectors.toOptional;
import static io.airlift.json.JsonCodec.jsonCodec;
import static io.trino.plugin.raptor.legacy.RaptorBucketFunction.validateBucketType;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.BUCKET_NUMBER_COLUMN_NAME;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.SHARD_UUID_COLUMN_NAME;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.SHARD_UUID_COLUMN_TYPE;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.bucketNumberColumnHandle;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.isHiddenColumn;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.shardRowIdHandle;
import static io.trino.plugin.raptor.legacy.RaptorColumnHandle.shardUuidColumnHandle;
import static io.trino.plugin.raptor.legacy.RaptorErrorCode.RAPTOR_ERROR;
import static io.trino.plugin.raptor.legacy.RaptorSessionProperties.getExternalBatchId;
import static io.trino.plugin.raptor.legacy.RaptorSessionProperties.getOneSplitPerBucketThreshold;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.BUCKETED_ON_PROPERTY;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.BUCKET_COUNT_PROPERTY;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.DISTRIBUTION_NAME_PROPERTY;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.ORDERING_PROPERTY;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.ORGANIZED_PROPERTY;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.TEMPORAL_COLUMN_PROPERTY;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.getBucketColumns;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.getBucketCount;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.getDistributionName;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.getSortColumns;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.getTemporalColumn;
import static io.trino.plugin.raptor.legacy.RaptorTableProperties.isOrganized;
import static io.trino.plugin.raptor.legacy.systemtables.ColumnRangesSystemTable.getSourceTable;
import static io.trino.plugin.raptor.legacy.util.DatabaseUtil.daoTransaction;
import static io.trino.plugin.raptor.legacy.util.DatabaseUtil.onDemandDao;
import static io.trino.plugin.raptor.legacy.util.DatabaseUtil.runIgnoringConstraintViolation;
import static io.trino.plugin.raptor.legacy.util.DatabaseUtil.runTransaction;
import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY;
import static io.trino.spi.StandardErrorCode.NOT_FOUND;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.connector.SortOrder.ASC_NULLS_FIRST;
import static io.trino.spi.type.DateType.DATE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;
import static java.lang.String.format;
import static java.util.Collections.nCopies;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toList;
public class RaptorMetadata
implements ConnectorMetadata
{
private static final Logger log = Logger.get(RaptorMetadata.class);
private static final JsonCodec<ShardInfo> SHARD_INFO_CODEC = jsonCodec(ShardInfo.class);
private static final JsonCodec<ShardDelta> SHARD_DELTA_CODEC = jsonCodec(ShardDelta.class);
private static final JsonCodec<ConnectorViewDefinition> VIEW_CODEC =
new JsonCodecFactory(new ObjectMapperProvider()).jsonCodec(ConnectorViewDefinition.class);
private final IDBI dbi;
private final MetadataDao dao;
private final ShardManager shardManager;
private final LongConsumer beginDeleteForTableId;
private final AtomicReference<Long> currentTransactionId = new AtomicReference<>();
    /**
     * Creates metadata with a no-op delete callback.
     *
     * @param dbi JDBI handle for the metadata database
     * @param shardManager shard manager backing this connector
     */
    public RaptorMetadata(IDBI dbi, ShardManager shardManager)
    {
        this(dbi, shardManager, tableId -> {});
    }
    /**
     * Creates metadata.
     *
     * @param dbi JDBI handle for the metadata database; also used to attach an
     *        on-demand {@link MetadataDao}
     * @param shardManager shard manager backing this connector
     * @param beginDeleteForTableId callback invoked with the table id when a
     *        delete operation begins
     */
    public RaptorMetadata(IDBI dbi, ShardManager shardManager, LongConsumer beginDeleteForTableId)
    {
        this.dbi = requireNonNull(dbi, "dbi is null");
        this.dao = onDemandDao(dbi, MetadataDao.class);
        this.shardManager = requireNonNull(shardManager, "shardManager is null");
        this.beginDeleteForTableId = requireNonNull(beginDeleteForTableId, "beginDeleteForTableId is null");
    }
    /** Lists all schema names known to the metadata database. */
    @Override
    public List<String> listSchemaNames(ConnectorSession session)
    {
        return dao.listSchemaNames();
    }
    /** Resolves a table name to a handle; returns {@code null} if the table does not exist. */
    @Override
    public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
    {
        return getTableHandle(tableName);
    }
    /**
     * Loads table information for {@code tableName} and wraps it in a
     * {@link RaptorTableHandle}, or returns {@code null} when the table does not
     * exist. The handle starts with no transaction (empty {@code OptionalLong}),
     * an all-inclusive constraint, and {@code delete = false}; bucket assignments
     * are resolved only when the table has a distribution.
     */
    private RaptorTableHandle getTableHandle(SchemaTableName tableName)
    {
        requireNonNull(tableName, "tableName is null");
        Table table = dao.getTableInformation(tableName.getSchemaName(), tableName.getTableName());
        if (table == null) {
            return null;
        }
        List<TableColumn> tableColumns = dao.listTableColumns(table.getTableId());
        checkArgument(!tableColumns.isEmpty(), "Table '%s' does not have any columns", tableName);
        return new RaptorTableHandle(
                tableName.getSchemaName(),
                tableName.getTableName(),
                table.getTableId(),
                table.getDistributionId(),
                table.getDistributionName(),
                table.getBucketCount(),
                table.isOrganized(),
                OptionalLong.empty(),
                TupleDomain.all(),
                table.getDistributionId().map(shardManager::getBucketAssignments),
                false);
    }
    /**
     * Exposes the column-ranges system table for names of the form recognized by
     * {@code getSourceTable}; empty when the name is not a system table or the
     * source table does not exist.
     */
    @Override
    public Optional<SystemTable> getSystemTable(ConnectorSession session, SchemaTableName tableName)
    {
        return getSourceTable(tableName)
                .map(this::getTableHandle)
                .map(handle -> new ColumnRangesSystemTable(handle, dbi));
    }
    /**
     * Builds full {@link ConnectorTableMetadata} for a table handle: its visible
     * columns (plus hidden shard-UUID and, for bucketed tables, bucket-number
     * columns) and its table properties (temporal column, bucketing, ordering,
     * bucket count, distribution name, organization flag).
     *
     * @throws TableNotFoundException when the table has no columns (i.e. was dropped)
     */
    @Override
    public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        RaptorTableHandle handle = (RaptorTableHandle) tableHandle;
        SchemaTableName tableName = new SchemaTableName(handle.getSchemaName(), handle.getTableName());
        List<TableColumn> tableColumns = dao.listTableColumns(handle.getTableId());
        if (tableColumns.isEmpty()) {
            throw new TableNotFoundException(tableName);
        }
        ImmutableMap.Builder<String, Object> properties = ImmutableMap.builder();
        // Sorted by ordinal so the property lists reproduce the declared order.
        SortedMap<Integer, String> bucketing = new TreeMap<>();
        SortedMap<Integer, String> ordering = new TreeMap<>();
        for (TableColumn column : tableColumns) {
            if (column.isTemporal()) {
                properties.put(TEMPORAL_COLUMN_PROPERTY, column.getColumnName());
            }
            column.getBucketOrdinal().ifPresent(bucketOrdinal -> bucketing.put(bucketOrdinal, column.getColumnName()));
            column.getSortOrdinal().ifPresent(sortOrdinal -> ordering.put(sortOrdinal, column.getColumnName()));
        }
        if (!bucketing.isEmpty()) {
            properties.put(BUCKETED_ON_PROPERTY, ImmutableList.copyOf(bucketing.values()));
        }
        if (!ordering.isEmpty()) {
            properties.put(ORDERING_PROPERTY, ImmutableList.copyOf(ordering.values()));
        }
        handle.getBucketCount().ifPresent(bucketCount -> properties.put(BUCKET_COUNT_PROPERTY, bucketCount));
        handle.getDistributionName().ifPresent(distributionName -> properties.put(DISTRIBUTION_NAME_PROPERTY, distributionName));
        // Only display organization property if set
        if (handle.isOrganized()) {
            properties.put(ORGANIZED_PROPERTY, true);
        }
        List<ColumnMetadata> columns = tableColumns.stream()
                .map(TableColumn::toColumnMetadata)
                .collect(toCollection(ArrayList::new));
        columns.add(hiddenColumn(SHARD_UUID_COLUMN_NAME, SHARD_UUID_COLUMN_TYPE));
        if (handle.isBucketed()) {
            columns.add(hiddenColumn(BUCKET_NUMBER_COLUMN_NAME, INTEGER));
        }
        return new ConnectorTableMetadata(tableName, columns, properties.build());
    }
    /** Lists tables, optionally restricted to one schema ({@code null} means all schemas). */
    @Override
    public List<SchemaTableName> listTables(ConnectorSession session, Optional<String> schemaName)
    {
        return dao.listTables(schemaName.orElse(null));
    }
@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
{
RaptorTableHandle raptorTableHandle = (RaptorTableHandle) tableHandle;
ImmutableMap.Builder<String, ColumnHandle> builder = ImmutableMap.builder();
for (TableColumn tableColumn : dao.listTableColumns(raptorTableHandle.getTableId())) {
builder.put(tableColumn.getColumnName(), getRaptorColumnHandle(tableColumn));
}
RaptorColumnHandle uuidColumn = shardUuidColumnHandle();
builder.put(uuidColumn.getColumnName(), uuidColumn);
if (raptorTableHandle.isBucketed()) {
RaptorColumnHandle bucketNumberColumn = bucketNumberColumnHandle();
builder.put(bucketNumberColumn.getColumnName(), bucketNumberColumn);
}
return builder.build();
}
@Override
public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
{
RaptorColumnHandle column = (RaptorColumnHandle) columnHandle;
if (isHiddenColumn(column.getColumnId())) {
return hiddenColumn(column.getColumnName(), column.getColumnType());
}
return new ColumnMetadata(column.getColumnName(), column.getColumnType());
}
    /**
     * Lists column metadata for every table matching the prefix, grouped by
     * table name. Hidden columns are not included (only stored table columns).
     */
    @Override
    public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
    {
        requireNonNull(prefix, "prefix is null");
        ImmutableListMultimap.Builder<SchemaTableName, ColumnMetadata> columns = ImmutableListMultimap.builder();
        for (TableColumn tableColumn : dao.listTableColumns(prefix.getSchema().orElse(null), prefix.getTable().orElse(null))) {
            ColumnMetadata columnMetadata = new ColumnMetadata(tableColumn.getColumnName(), tableColumn.getDataType());
            columns.put(tableColumn.getTable(), columnMetadata);
        }
        return Multimaps.asMap(columns.build());
    }
    /** This connector uses the modern (non-layout) metadata APIs. */
    @Override
    public boolean usesLegacyTableLayouts()
    {
        return false;
    }
    /**
     * Pushes the constraint summary into the table handle by intersecting it with
     * the handle's current constraint. Returns empty when the new domain equals
     * the existing one (fixed point), which stops the optimizer from looping.
     * The returned remaining filter is the full summary: pushdown here is only
     * advisory, so the engine must still enforce the predicate.
     */
    @Override
    public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle handle, Constraint constraint)
    {
        RaptorTableHandle table = (RaptorTableHandle) handle;
        TupleDomain<RaptorColumnHandle> newDomain = constraint.getSummary().transform(RaptorColumnHandle.class::cast);
        if (newDomain.equals(table.getConstraint())) {
            return Optional.empty();
        }
        return Optional.of(new ConstraintApplicationResult<>(
                new RaptorTableHandle(table.getSchemaName(),
                        table.getTableName(),
                        table.getTableId(),
                        table.getDistributionId(),
                        table.getDistributionName(),
                        table.getBucketCount(),
                        table.isOrganized(),
                        table.getTransactionId(),
                        newDomain.intersect(table.getConstraint()),
                        table.getBucketAssignments(),
                        table.isDelete()),
                constraint.getSummary()));
    }
    /**
     * Reports table partitioning properties. Plain tables get default properties;
     * bucketed tables report their partitioning handle and bucket columns, and —
     * when the bucket count reaches the session's one-split-per-bucket threshold —
     * also advertise the bucket columns as stream partitioning.
     * NOTE(review): {@code getBucketCount().getAsInt()} assumes a present
     * partitioning handle implies a present bucket count — appears to hold given
     * how handles are built, but confirm.
     */
    @Override
    public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle handle)
    {
        RaptorTableHandle table = (RaptorTableHandle) handle;
        if (table.getPartitioningHandle().isEmpty()) {
            return new ConnectorTableProperties();
        }
        List<RaptorColumnHandle> bucketColumnHandles = getBucketColumnHandles(table.getTableId());
        RaptorPartitioningHandle partitioning = table.getPartitioningHandle().get();
        boolean oneSplitPerBucket = table.getBucketCount().getAsInt() >= getOneSplitPerBucketThreshold(session);
        return new ConnectorTableProperties(
                TupleDomain.all(),
                Optional.of(new ConnectorTablePartitioning(
                        partitioning,
                        ImmutableList.copyOf(bucketColumnHandles))),
                oneSplitPerBucket ? Optional.of(ImmutableSet.copyOf(bucketColumnHandles)) : Optional.empty(),
                Optional.empty(),
                ImmutableList.of());
    }
    /**
     * Computes the write layout for CREATE TABLE from the table properties:
     * assigns provisional column ids (1-based, in declaration order), resolves or
     * creates the distribution, and returns its partitioning plus the bucket
     * column names. Empty when the table is not bucketed.
     */
    @Override
    public Optional<ConnectorNewTableLayout> getNewTableLayout(ConnectorSession session, ConnectorTableMetadata metadata)
    {
        ImmutableMap.Builder<String, RaptorColumnHandle> map = ImmutableMap.builder();
        long columnId = 1;
        for (ColumnMetadata column : metadata.getColumns()) {
            map.put(column.getName(), new RaptorColumnHandle(column.getName(), columnId, column.getType()));
            columnId++;
        }
        Optional<DistributionInfo> distribution = getOrCreateDistribution(map.build(), metadata.getProperties());
        if (distribution.isEmpty()) {
            return Optional.empty();
        }
        List<String> partitionColumns = distribution.get().getBucketColumns().stream()
                .map(RaptorColumnHandle::getColumnName)
                .collect(toList());
        ConnectorPartitioningHandle partitioning = getPartitioningHandle(distribution.get().getDistributionId());
        return Optional.of(new ConnectorNewTableLayout(partitioning, partitionColumns));
    }
    /** Builds a partitioning handle carrying the distribution's current bucket-to-node assignments. */
    private RaptorPartitioningHandle getPartitioningHandle(long distributionId)
    {
        return new RaptorPartitioningHandle(distributionId, shardManager.getBucketAssignments(distributionId));
    }
    /**
     * Resolves (or creates) the distribution described by the table properties.
     * Validates that bucket count and bucket columns are specified together, that
     * a named distribution (if any) matches the requested bucket count and column
     * types, and creates buckets for new distributions.
     *
     * @return distribution info, or empty when the table is unbucketed
     * @throws TrinoException with {@code INVALID_TABLE_PROPERTY} on inconsistent properties
     */
    private Optional<DistributionInfo> getOrCreateDistribution(Map<String, RaptorColumnHandle> columnHandleMap, Map<String, Object> properties)
    {
        OptionalInt bucketCount = getBucketCount(properties);
        List<RaptorColumnHandle> bucketColumnHandles = getBucketColumnHandles(getBucketColumns(properties), columnHandleMap);
        // bucket_count and bucketed_on must be specified together.
        if (bucketCount.isPresent() && bucketColumnHandles.isEmpty()) {
            throw new TrinoException(INVALID_TABLE_PROPERTY, format("Must specify '%s' along with '%s'", BUCKETED_ON_PROPERTY, BUCKET_COUNT_PROPERTY));
        }
        if (bucketCount.isEmpty() && !bucketColumnHandles.isEmpty()) {
            throw new TrinoException(INVALID_TABLE_PROPERTY, format("Must specify '%s' along with '%s'", BUCKET_COUNT_PROPERTY, BUCKETED_ON_PROPERTY));
        }
        ImmutableList.Builder<Type> bucketColumnTypes = ImmutableList.builder();
        for (RaptorColumnHandle column : bucketColumnHandles) {
            validateBucketType(column.getColumnType());
            bucketColumnTypes.add(column.getColumnType());
        }
        long distributionId;
        String distributionName = getDistributionName(properties);
        if (distributionName != null) {
            // A named distribution also requires bucket columns (and hence, by the
            // checks above, a bucket count).
            if (bucketColumnHandles.isEmpty()) {
                throw new TrinoException(INVALID_TABLE_PROPERTY, format("Must specify '%s' along with '%s'", BUCKETED_ON_PROPERTY, DISTRIBUTION_NAME_PROPERTY));
            }
            Distribution distribution = dao.getDistribution(distributionName);
            if (distribution == null) {
                if (bucketCount.isEmpty()) {
                    throw new TrinoException(INVALID_TABLE_PROPERTY, "Distribution does not exist and bucket count is not specified");
                }
                distribution = getOrCreateDistribution(distributionName, bucketColumnTypes.build(), bucketCount.getAsInt());
            }
            distributionId = distribution.getId();
            if (bucketCount.isPresent() && (distribution.getBucketCount() != bucketCount.getAsInt())) {
                throw new TrinoException(INVALID_TABLE_PROPERTY, "Bucket count must match distribution");
            }
            if (!distribution.getColumnTypes().equals(bucketColumnTypes.build())) {
                throw new TrinoException(INVALID_TABLE_PROPERTY, "Bucket column types must match distribution");
            }
        }
        else if (bucketCount.isPresent()) {
            // Anonymous distribution: create a fresh one for this table.
            String types = Distribution.serializeColumnTypes(bucketColumnTypes.build());
            distributionId = dao.insertDistribution(null, types, bucketCount.getAsInt());
        }
        else {
            return Optional.empty();
        }
        // bucketCount is present on every path that reaches here (see checks above).
        shardManager.createBuckets(distributionId, bucketCount.getAsInt());
        return Optional.of(new DistributionInfo(distributionId, bucketCount.getAsInt(), bucketColumnHandles));
    }
private Distribution getOrCreateDistribution(String name, List<Type> columnTypes, int bucketCount)
{
String types = Distribution.serializeColumnTypes(columnTypes);
runIgnoringConstraintViolation(() -> dao.insertDistribution(name, types, bucketCount));
Distribution distribution = dao.getDistribution(name);
if (distribution == null) {
throw new TrinoException(RAPTOR_ERROR, "Distribution does not exist after insert");
}
return distribution;
}
    /**
     * Non-transactional CREATE TABLE: computes the layout, then runs the begin/
     * finish pair with no data fragments. {@code ignoreExisting} is not consulted
     * here; duplicate names surface from the underlying insert.
     */
    @Override
    public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting)
    {
        Optional<ConnectorNewTableLayout> layout = getNewTableLayout(session, tableMetadata);
        finishCreateTable(session, beginCreateTable(session, tableMetadata, layout), ImmutableList.of(), ImmutableList.of());
    }
    /** Drops the table by id; the shard manager removes shards and table metadata. */
    @Override
    public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        RaptorTableHandle raptorHandle = (RaptorTableHandle) tableHandle;
        shardManager.dropTable(raptorHandle.getTableId());
    }
    /** Renames the table (schema and/or name) in a single metadata transaction. */
    @Override
    public void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTableName)
    {
        RaptorTableHandle table = (RaptorTableHandle) tableHandle;
        runTransaction(dbi, (handle, status) -> {
            MetadataDao dao = handle.attach(MetadataDao.class);
            dao.renameTable(table.getTableId(), newTableName.getSchemaName(), newTableName.getTableName());
            return null;
        });
    }
    /**
     * Appends a new column: its column id and ordinal position follow the current
     * last column, the metadata insert and table-version bump run in one
     * transaction, and the shard manager is told to add the column to the index.
     * Assumes the table has at least one column (enforced at handle creation).
     */
    @Override
    public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
    {
        RaptorTableHandle table = (RaptorTableHandle) tableHandle;
        // Always add new columns to the end.
        List<TableColumn> existingColumns = dao.listTableColumns(table.getSchemaName(), table.getTableName());
        TableColumn lastColumn = existingColumns.get(existingColumns.size() - 1);
        long columnId = lastColumn.getColumnId() + 1;
        int ordinalPosition = lastColumn.getOrdinalPosition() + 1;
        String type = column.getType().getTypeId().getId();
        daoTransaction(dbi, MetadataDao.class, dao -> {
            dao.insertColumn(table.getTableId(), columnId, column.getName(), ordinalPosition, type, null, null);
            dao.updateTableVersion(table.getTableId(), session.getStart().toEpochMilli());
        });
        shardManager.addColumn(table.getTableId(), new ColumnInfo(columnId, column.getType()));
    }
    /** Renames a column and bumps the table version in one metadata transaction. */
    @Override
    public void renameColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle source, String target)
    {
        RaptorTableHandle table = (RaptorTableHandle) tableHandle;
        RaptorColumnHandle sourceColumn = (RaptorColumnHandle) source;
        daoTransaction(dbi, MetadataDao.class, dao -> {
            dao.renameColumn(table.getTableId(), sourceColumn.getColumnId(), target);
            dao.updateTableVersion(table.getTableId(), session.getStart().toEpochMilli());
        });
    }
    /**
     * Drops a column after rejecting every unsupported case: the only remaining
     * column, the column with the largest id (shard-format limitation), bucket
     * columns, the temporal column, and sort columns. The metadata delete and
     * table-version bump run in one transaction.
     */
    @Override
    public void dropColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle column)
    {
        RaptorTableHandle table = (RaptorTableHandle) tableHandle;
        RaptorColumnHandle raptorColumn = (RaptorColumnHandle) column;
        List<TableColumn> existingColumns = dao.listTableColumns(table.getSchemaName(), table.getTableName());
        if (existingColumns.size() <= 1) {
            throw new TrinoException(NOT_SUPPORTED, "Cannot drop the only column in a table");
        }
        long maxColumnId = existingColumns.stream().mapToLong(TableColumn::getColumnId).max().getAsLong();
        if (raptorColumn.getColumnId() == maxColumnId) {
            throw new TrinoException(NOT_SUPPORTED, "Cannot drop the column which has the largest column ID in the table");
        }
        if (getBucketColumnHandles(table.getTableId()).contains(column)) {
            throw new TrinoException(NOT_SUPPORTED, "Cannot drop bucket columns");
        }
        Optional.ofNullable(dao.getTemporalColumnId(table.getTableId())).ifPresent(tempColumnId -> {
            if (raptorColumn.getColumnId() == tempColumnId) {
                throw new TrinoException(NOT_SUPPORTED, "Cannot drop the temporal column");
            }
        });
        if (getSortColumnHandles(table.getTableId()).contains(raptorColumn)) {
            throw new TrinoException(NOT_SUPPORTED, "Cannot drop sort columns");
        }
        daoTransaction(dbi, MetadataDao.class, dao -> {
            dao.dropColumn(table.getTableId(), raptorColumn.getColumnId());
            dao.updateTableVersion(table.getTableId(), session.getStart().toEpochMilli());
        });
        // TODO: drop column from index table
    }
    /**
     * Begins CREATE TABLE AS / CREATE TABLE: validates no view shadows the name,
     * assigns provisional column ids, resolves sort/temporal/bucket columns from
     * the table properties, enforces that the temporal column is timestamp or
     * date and that organized tables have an ordering but no temporal column,
     * starts a shard transaction (registered for rollback), and packages
     * everything into a {@link RaptorOutputTableHandle}.
     */
    @Override
    public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, Optional<ConnectorNewTableLayout> layout)
    {
        if (viewExists(session, tableMetadata.getTable())) {
            throw new TrinoException(ALREADY_EXISTS, "View already exists: " + tableMetadata.getTable());
        }
        Optional<RaptorPartitioningHandle> partitioning = layout
                .map(ConnectorNewTableLayout::getPartitioning)
                .map(Optional::get)
                .map(RaptorPartitioningHandle.class::cast);
        ImmutableList.Builder<RaptorColumnHandle> columnHandles = ImmutableList.builder();
        ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
        // Column ids are assigned 1-based in declaration order.
        long columnId = 1;
        for (ColumnMetadata column : tableMetadata.getColumns()) {
            columnHandles.add(new RaptorColumnHandle(column.getName(), columnId, column.getType()));
            columnTypes.add(column.getType());
            columnId++;
        }
        Map<String, RaptorColumnHandle> columnHandleMap = Maps.uniqueIndex(columnHandles.build(), RaptorColumnHandle::getColumnName);
        List<RaptorColumnHandle> sortColumnHandles = getSortColumnHandles(getSortColumns(tableMetadata.getProperties()), columnHandleMap);
        Optional<RaptorColumnHandle> temporalColumnHandle = getTemporalColumnHandle(getTemporalColumn(tableMetadata.getProperties()), columnHandleMap);
        if (temporalColumnHandle.isPresent()) {
            RaptorColumnHandle column = temporalColumnHandle.get();
            if (!column.getColumnType().equals(TIMESTAMP_MILLIS) && !column.getColumnType().equals(DATE)) {
                throw new TrinoException(NOT_SUPPORTED, "Temporal column must be of type timestamp or date: " + column.getColumnName());
            }
        }
        boolean organized = isOrganized(tableMetadata.getProperties());
        if (organized) {
            if (temporalColumnHandle.isPresent()) {
                throw new TrinoException(NOT_SUPPORTED, "Table with temporal columns cannot be organized");
            }
            if (sortColumnHandles.isEmpty()) {
                throw new TrinoException(NOT_SUPPORTED, "Table organization requires an ordering");
            }
        }
        long transactionId = shardManager.beginTransaction();
        setTransactionId(transactionId);
        Optional<DistributionInfo> distribution = partitioning.map(handle ->
                getDistributionInfo(handle.getDistributionId(), columnHandleMap, tableMetadata.getProperties()));
        return new RaptorOutputTableHandle(
                transactionId,
                tableMetadata.getTable().getSchemaName(),
                tableMetadata.getTable().getTableName(),
                columnHandles.build(),
                columnTypes.build(),
                sortColumnHandles,
                nCopies(sortColumnHandles.size(), ASC_NULLS_FIRST),
                temporalColumnHandle,
                distribution.map(info -> OptionalLong.of(info.getDistributionId())).orElse(OptionalLong.empty()),
                distribution.map(info -> OptionalInt.of(info.getBucketCount())).orElse(OptionalInt.empty()),
                organized,
                distribution.map(DistributionInfo::getBucketColumns).orElse(ImmutableList.of()));
    }
    /**
     * Loads the distribution by id and pairs it with the bucket columns named in
     * the table properties.
     *
     * @throws TrinoException with {@code RAPTOR_ERROR} when the id is unknown
     */
    private DistributionInfo getDistributionInfo(long distributionId, Map<String, RaptorColumnHandle> columnHandleMap, Map<String, Object> properties)
    {
        Distribution distribution = dao.getDistribution(distributionId);
        if (distribution == null) {
            throw new TrinoException(RAPTOR_ERROR, "Distribution ID does not exist: " + distributionId);
        }
        List<RaptorColumnHandle> bucketColumnHandles = getBucketColumnHandles(getBucketColumns(properties), columnHandleMap);
        return new DistributionInfo(distributionId, distribution.getBucketCount(), bucketColumnHandles);
    }
private static Optional<RaptorColumnHandle> getTemporalColumnHandle(String temporalColumn, Map<String, RaptorColumnHandle> columnHandleMap)
{
if (temporalColumn == null) {
return Optional.empty();
}
RaptorColumnHandle handle = columnHandleMap.get(temporalColumn);
if (handle == null) {
throw new TrinoException(NOT_FOUND, "Temporal column does not exist: " + temporalColumn);
}
return Optional.of(handle);
}
private static List<RaptorColumnHandle> getSortColumnHandles(List<String> sortColumns, Map<String, RaptorColumnHandle> columnHandleMap)
{
ImmutableList.Builder<RaptorColumnHandle> columnHandles = ImmutableList.builder();
for (String column : sortColumns) {
if (!columnHandleMap.containsKey(column)) {
throw new TrinoException(NOT_FOUND, "Ordering column does not exist: " + column);
}
columnHandles.add(columnHandleMap.get(column));
}
return columnHandles.build();
}
private static List<RaptorColumnHandle> getBucketColumnHandles(List<String> bucketColumns, Map<String, RaptorColumnHandle> columnHandleMap)
{
ImmutableList.Builder<RaptorColumnHandle> columnHandles = ImmutableList.builder();
for (String column : bucketColumns) {
if (!columnHandleMap.containsKey(column)) {
throw new TrinoException(NOT_FOUND, "Bucketing column does not exist: " + column);
}
columnHandles.add(columnHandleMap.get(column));
}
return columnHandles.build();
}
    /**
     * Finishes CREATE TABLE: inserts the table row and its columns (recording
     * sort/bucket positions and the temporal column) in one metadata transaction,
     * creates the shard-manager table, commits the written shards under the
     * transaction started in {@code beginCreateTable}, and clears the rollback
     * action on success.
     */
    @Override
    public Optional<ConnectorOutputMetadata> finishCreateTable(ConnectorSession session, ConnectorOutputTableHandle outputTableHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
    {
        RaptorOutputTableHandle table = (RaptorOutputTableHandle) outputTableHandle;
        long transactionId = table.getTransactionId();
        long updateTime = session.getStart().toEpochMilli();
        long newTableId = runTransaction(dbi, (dbiHandle, status) -> {
            MetadataDao dao = dbiHandle.attach(MetadataDao.class);
            Long distributionId = table.getDistributionId().isPresent() ? table.getDistributionId().getAsLong() : null;
            // TODO: update default value of organization_enabled to true
            long tableId = dao.insertTable(table.getSchemaName(), table.getTableName(), true, table.isOrganized(), distributionId, updateTime);
            List<RaptorColumnHandle> sortColumnHandles = table.getSortColumnHandles();
            List<RaptorColumnHandle> bucketColumnHandles = table.getBucketColumnHandles();
            for (int i = 0; i < table.getColumnTypes().size(); i++) {
                RaptorColumnHandle column = table.getColumnHandles().get(i);
                int columnId = i + 1;
                String type = table.getColumnTypes().get(i).getTypeId().getId();
                // Sort/bucket positions are null for columns that are not part of
                // the ordering/bucketing.
                Integer sortPosition = sortColumnHandles.contains(column) ? sortColumnHandles.indexOf(column) : null;
                Integer bucketPosition = bucketColumnHandles.contains(column) ? bucketColumnHandles.indexOf(column) : null;
                dao.insertColumn(tableId, columnId, column.getColumnName(), i, type, sortPosition, bucketPosition);
                if (table.getTemporalColumnHandle().isPresent() && table.getTemporalColumnHandle().get().equals(column)) {
                    dao.updateTemporalColumnId(tableId, columnId);
                }
            }
            return tableId;
        });
        List<ColumnInfo> columns = table.getColumnHandles().stream().map(ColumnInfo::fromHandle).collect(toList());
        OptionalLong temporalColumnId = table.getTemporalColumnHandle().map(RaptorColumnHandle::getColumnId)
                .map(OptionalLong::of)
                .orElse(OptionalLong.empty());
        // TODO: refactor this to avoid creating an empty table on failure
        shardManager.createTable(newTableId, columns, table.getBucketCount().isPresent(), temporalColumnId);
        shardManager.commitShards(transactionId, newTableId, columns, parseFragments(fragments), Optional.empty(), updateTime);
        clearRollback();
        return Optional.empty();
    }
/**
 * Starts an INSERT: loads the table's column, sort, bucket, and temporal
 * metadata from the metadata database and opens a shard-manager transaction
 * whose ID is recorded so {@link #rollback()} can abort it on failure.
 */
@Override
public ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    RaptorTableHandle handle = (RaptorTableHandle) tableHandle;
    long tableId = handle.getTableId();
    ImmutableList.Builder<RaptorColumnHandle> columnHandlesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
    for (TableColumn column : dao.listTableColumns(tableId)) {
        columnHandlesBuilder.add(new RaptorColumnHandle(column.getColumnName(), column.getColumnId(), column.getDataType()));
        columnTypes.add(column.getDataType());
    }
    long transactionId = shardManager.beginTransaction();
    setTransactionId(transactionId);
    Optional<String> externalBatchId = getExternalBatchId(session);
    List<RaptorColumnHandle> sortColumnHandles = getSortColumnHandles(tableId);
    List<RaptorColumnHandle> bucketColumnHandles = getBucketColumnHandles(tableId);
    ImmutableList<RaptorColumnHandle> columnHandles = columnHandlesBuilder.build();
    // Resolve the temporal column handle (if configured) by matching its column ID;
    // getOnlyElement fails unless exactly one column carries that ID.
    Optional<RaptorColumnHandle> temporalColumnHandle = Optional.ofNullable(dao.getTemporalColumnId(tableId))
            .map(temporalColumnId -> getOnlyElement(columnHandles.stream()
                    .filter(columnHandle -> columnHandle.getColumnId() == temporalColumnId)
                    .collect(toList())));
    return new RaptorInsertTableHandle(
            transactionId,
            tableId,
            columnHandles,
            columnTypes.build(),
            externalBatchId,
            sortColumnHandles,
            // every sort column uses the same ordering
            nCopies(sortColumnHandles.size(), ASC_NULLS_FIRST),
            handle.getBucketCount(),
            bucketColumnHandles,
            temporalColumnHandle);
}
/**
 * Loads the table's sort columns from the metadata database as column handles.
 */
private List<RaptorColumnHandle> getSortColumnHandles(long tableId)
{
    return dao.listSortColumns(tableId)
            .stream()
            .map(sortColumn -> getRaptorColumnHandle(sortColumn))
            .collect(toList());
}
/**
 * Loads the table's bucketing columns from the metadata database as column handles.
 */
private List<RaptorColumnHandle> getBucketColumnHandles(long tableId)
{
    return dao.listBucketColumns(tableId)
            .stream()
            .map(bucketColumn -> getRaptorColumnHandle(bucketColumn))
            .collect(toList());
}
/**
 * Completes an INSERT: parses the shard descriptors produced by the workers
 * and commits them atomically through the shard manager, then clears the
 * recorded transaction so a later rollback becomes a no-op.
 */
@Override
public Optional<ConnectorOutputMetadata> finishInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
{
    RaptorInsertTableHandle raptorHandle = (RaptorInsertTableHandle) insertHandle;
    Collection<ShardInfo> shards = parseFragments(fragments);
    List<ColumnInfo> columnInfos = raptorHandle.getColumnHandles().stream()
            .map(ColumnInfo::fromHandle)
            .collect(toList());
    long commitTime = session.getStart().toEpochMilli();
    log.info("Committing insert into tableId %s (queryId: %s, shards: %s, columns: %s)", raptorHandle.getTableId(), session.getQueryId(), shards.size(), columnInfos.size());
    shardManager.commitShards(raptorHandle.getTransactionId(), raptorHandle.getTableId(), columnInfos, shards, raptorHandle.getExternalBatchId(), commitTime);
    clearRollback();
    return Optional.empty();
}
/**
 * Returns the column handle that carries row identity during a DELETE;
 * Raptor uses the synthetic shard row ID column.
 */
@Override
public ColumnHandle getDeleteRowIdColumnHandle(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    return shardRowIdHandle();
}
/**
 * Starts a DELETE: opens a shard-manager transaction and returns a copy of the
 * table handle carrying that transaction ID with the constraint reset to
 * {@code TupleDomain.all()}.
 */
@Override
public ConnectorTableHandle beginDelete(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    RaptorTableHandle handle = (RaptorTableHandle) tableHandle;
    // NOTE(review): consumer invoked before the transaction begins -- appears
    // to be a callback/test hook keyed by table ID; confirm at its definition.
    beginDeleteForTableId.accept(handle.getTableId());
    long transactionId = shardManager.beginTransaction();
    setTransactionId(transactionId);
    return new RaptorTableHandle(
            handle.getSchemaName(),
            handle.getTableName(),
            handle.getTableId(),
            handle.getDistributionId(),
            handle.getDistributionName(),
            handle.getBucketCount(),
            handle.isOrganized(),
            OptionalLong.of(transactionId),
            TupleDomain.all(),
            handle.getBucketAssignments(),
            // presumably marks the handle as a delete target -- TODO confirm
            // against the RaptorTableHandle constructor parameter name
            true);
}
/**
 * Completes a DELETE: each fragment is a JSON-encoded shard delta naming the
 * shards whose rows were removed (old UUIDs) and any rewritten replacement
 * shards; the shard manager swaps old for new atomically under the recorded
 * transaction.
 */
@Override
public void finishDelete(ConnectorSession session, ConnectorTableHandle tableHandle, Collection<Slice> fragments)
{
    RaptorTableHandle table = (RaptorTableHandle) tableHandle;
    long transactionId = table.getTransactionId().getAsLong();
    long tableId = table.getTableId();
    List<ColumnInfo> columns = getColumnHandles(session, tableHandle).values().stream()
            .map(RaptorColumnHandle.class::cast)
            .map(ColumnInfo::fromHandle).collect(toList());
    ImmutableSet.Builder<UUID> oldShardUuidsBuilder = ImmutableSet.builder();
    ImmutableList.Builder<ShardInfo> newShardsBuilder = ImmutableList.builder();
    // Merge the per-worker deltas into one set of removed shards and one list
    // of replacement shards.
    fragments.stream()
            .map(fragment -> SHARD_DELTA_CODEC.fromJson(fragment.getBytes()))
            .forEach(delta -> {
                oldShardUuidsBuilder.addAll(delta.getOldShardUuids());
                newShardsBuilder.addAll(delta.getNewShards());
            });
    Set<UUID> oldShardUuids = oldShardUuidsBuilder.build();
    List<ShardInfo> newShards = newShardsBuilder.build();
    OptionalLong updateTime = OptionalLong.of(session.getStart().toEpochMilli());
    // "removed" counts fully deleted shards net of the rewritten replacements.
    log.info("Finishing delete for tableId %s (removed: %s, rewritten: %s)", tableId, oldShardUuids.size() - newShards.size(), newShards.size());
    shardManager.replaceShardUuids(transactionId, tableId, columns, oldShardUuids, newShards, updateTime);
    clearRollback();
}
/**
 * Metadata-only deletes are not supported by this connector.
 */
@Override
public boolean supportsMetadataDelete(ConnectorSession session, ConnectorTableHandle tableHandle, ConnectorTableLayoutHandle tableLayoutHandle)
{
    return false;
}
/**
 * Creates a view, or replaces an existing one when {@code replace} is true.
 * Fails with ALREADY_EXISTS if a table of the same name exists.
 */
@Override
public void createView(ConnectorSession session, SchemaTableName viewName, ConnectorViewDefinition definition, boolean replace)
{
    String schemaName = viewName.getSchemaName();
    String tableName = viewName.getTableName();
    String viewData = VIEW_CODEC.toJson(definition);
    // Refuse to shadow an existing table of the same name.
    // NOTE(review): check-then-act; a concurrent CREATE TABLE between this
    // check and the insert below is not guarded against.
    if (getTableHandle(viewName) != null) {
        throw new TrinoException(ALREADY_EXISTS, "Table already exists: " + viewName);
    }
    if (replace) {
        // Drop and re-insert inside one metadata-DB transaction so the
        // replacement is atomic.
        daoTransaction(dbi, MetadataDao.class, dao -> {
            dao.dropView(schemaName, tableName);
            dao.insertView(schemaName, tableName, viewData);
        });
        return;
    }
    try {
        dao.insertView(schemaName, tableName, viewData);
    }
    catch (TrinoException e) {
        // If the insert failed because the view already exists (presumably a
        // duplicate-key violation), report that clearly; otherwise propagate.
        if (viewExists(session, viewName)) {
            throw new TrinoException(ALREADY_EXISTS, "View already exists: " + viewName);
        }
        throw e;
    }
}
/**
 * Drops the named view; fails with {@link ViewNotFoundException} when no such
 * view exists.
 */
@Override
public void dropView(ConnectorSession session, SchemaTableName viewName)
{
    if (viewExists(session, viewName)) {
        dao.dropView(viewName.getSchemaName(), viewName.getTableName());
        return;
    }
    throw new ViewNotFoundException(viewName);
}
/**
 * Lists view names, optionally restricted to one schema. An empty
 * {@code schemaName} is passed to the DAO as null, which presumably means
 * "all schemas" -- TODO confirm against MetadataDao.listViews.
 */
@Override
public List<SchemaTableName> listViews(ConnectorSession session, Optional<String> schemaName)
{
    return dao.listViews(schemaName.orElse(null));
}
/**
 * Loads all view definitions, optionally restricted to one schema, decoding
 * each stored JSON definition.
 */
@Override
public Map<SchemaTableName, ConnectorViewDefinition> getViews(ConnectorSession session, Optional<String> schemaName)
{
    ImmutableMap.Builder<SchemaTableName, ConnectorViewDefinition> views = ImmutableMap.builder();
    dao.getViews(schemaName.orElse(null), null)
            .forEach(view -> views.put(view.getName(), VIEW_CODEC.fromJson(view.getData())));
    return views.build();
}
/**
 * Loads the definition of the named view, if one exists.
 */
@Override
public Optional<ConnectorViewDefinition> getView(ConnectorSession session, SchemaTableName viewName)
{
    return dao.getViews(viewName.getSchemaName(), viewName.getTableName())
            .stream()
            .map(ViewResult::getData)
            .map(VIEW_CODEC::fromJson)
            .collect(toOptional());
}
/**
 * Returns true if a view definition exists for the given name.
 */
private boolean viewExists(ConnectorSession session, SchemaTableName viewName)
{
    return getView(session, viewName).isPresent();
}
/**
 * Adapts a metadata-database column row into a Raptor column handle.
 */
private RaptorColumnHandle getRaptorColumnHandle(TableColumn tableColumn)
{
    return new RaptorColumnHandle(tableColumn.getColumnName(), tableColumn.getColumnId(), tableColumn.getDataType());
}
/**
 * Decodes the JSON-encoded shard descriptors produced by the workers.
 */
private static Collection<ShardInfo> parseFragments(Collection<Slice> fragments)
{
    return fragments.stream()
            .map(Slice::getBytes)
            .map(SHARD_INFO_CODEC::fromJson)
            .collect(toList());
}
/**
 * Builds {@link ColumnMetadata} for a hidden column.
 */
private static ColumnMetadata hiddenColumn(String name, Type type)
{
    return ColumnMetadata.builder()
            .setName(name)
            .setType(type)
            .setHidden(true)
            .build();
}
/**
 * Records the ID of the single in-flight shard transaction; fails if a
 * transaction is already recorded (at most one may be active at a time).
 */
private void setTransactionId(long transactionId)
{
    checkState(currentTransactionId.compareAndSet(null, transactionId), "current transaction ID already set");
}
/**
 * Forgets the in-flight transaction ID after a successful commit, making a
 * subsequent {@link #rollback()} a no-op.
 */
private void clearRollback()
{
    currentTransactionId.set(null);
}
/**
 * Aborts the in-flight shard transaction, if any; safe to call when no
 * transaction is active.
 */
public void rollback()
{
    Long activeTransactionId = currentTransactionId.getAndSet(null);
    if (activeTransactionId == null) {
        return;
    }
    shardManager.rollbackTransaction(activeTransactionId);
}
/**
 * Immutable value object describing how a table's rows are distributed:
 * the distribution ID, the bucket count, and the bucketing columns.
 */
private static class DistributionInfo
{
    private final long distributionId;
    private final int bucketCount;
    private final List<RaptorColumnHandle> bucketColumns;

    public DistributionInfo(long distributionId, int bucketCount, List<RaptorColumnHandle> bucketColumns)
    {
        requireNonNull(bucketColumns, "bucketColumns is null");
        this.distributionId = distributionId;
        this.bucketCount = bucketCount;
        // Defensive snapshot so later mutation of the caller's list has no effect.
        this.bucketColumns = ImmutableList.copyOf(bucketColumns);
    }

    public long getDistributionId()
    {
        return distributionId;
    }

    public int getBucketCount()
    {
        return bucketCount;
    }

    public List<RaptorColumnHandle> getBucketColumns()
    {
        return bucketColumns;
    }
}
}
|
|
/**
* This document is a part of the source code and related artifacts
* for CollectionSpace, an open source collections management system
* for museums and related institutions:
* http://www.collectionspace.org
* http://wiki.collectionspace.org
* Copyright 2009 University of California at Berkeley
* Licensed under the Educational Community License (ECL), Version 2.0.
* You may not use this file except in compliance with this License.
* You may obtain a copy of the ECL 2.0 License at
* https://source.collectionspace.org/collection-space/LICENSE.txt
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.collectionspace.services.client;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Properties;
import javax.ws.rs.PathParam;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope; //import org.collectionspace.services.collectionobject.CollectionobjectsCommonList;
import org.apache.http.impl.client.DefaultHttpClient;
import org.collectionspace.services.common.authorityref.AuthorityRefList;
import org.collectionspace.services.jaxb.AbstractCommonList;
import org.jboss.resteasy.client.ClientResponse; //import org.collectionspace.services.common.context.ServiceContext;
import org.jboss.resteasy.client.ProxyFactory;
import org.jboss.resteasy.client.core.executors.ApacheHttpClient4Executor;
import org.jboss.resteasy.client.jaxrs.ResteasyClient;
//import org.jboss.resteasy.client.core.executors.ApacheHttpClientExecutor;
import org.jboss.resteasy.plugins.providers.RegisterBuiltin;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Package-private JAX-RS client request filter that adds an HTTP Basic
 * {@code Authorization} header to every outgoing request.
 */
class Authenticator implements ClientRequestFilter {

    /** User name sent in the Basic credentials. */
    private final String user;
    /** Password sent in the Basic credentials. */
    private final String password;

    public Authenticator(String user, String password) {
        this.user = user;
        this.password = password;
    }

    /**
     * Adds the Basic {@code Authorization} header to the outgoing request.
     */
    @Override
    public void filter(ClientRequestContext requestContext) throws IOException {
        MultivaluedMap<String, Object> headers = requestContext.getHeaders();
        final String basicAuthentication = getBasicAuthentication();
        headers.add("Authorization", basicAuthentication);
    }

    /**
     * Builds the header value: "Basic " followed by the Base64 encoding of
     * the UTF-8 bytes of "user:password".
     */
    private String getBasicAuthentication() {
        String token = this.user + ":" + this.password;
        // StandardCharsets.UTF_8 cannot trigger UnsupportedEncodingException,
        // unlike the previous string-named charset lookup, so no catch-and-wrap
        // path is needed; java.util.Base64 replaces the JAXB DatatypeConverter,
        // which is no longer part of the JDK as of Java 11.
        return "Basic " + Base64.getEncoder().encodeToString(token.getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * BaseServiceClient is an abstract base client of all service clients. FIXME:
 * http://issues.collectionspace.org/browse/CSPACE-1684
 *
 * @param <CLT> the "common" list type returned by list operations
 * @param <REQUEST_PT> the request payload type
 * @param <RESPONSE_PT> the response payload type
 * @param <P> the RESTEasy proxy type used to invoke the service
 */
public abstract class AbstractServiceClientImpl<CLT, REQUEST_PT, RESPONSE_PT, P extends CollectionSpaceProxy<CLT>>
implements CollectionSpaceClient<CLT, REQUEST_PT, RESPONSE_PT, P> {
/** The logger. */
protected final Logger logger = LoggerFactory.getLogger(AbstractServiceClientImpl.class);
/**
* The character used to separate the words in a part label
*/
public static final String PART_LABEL_SEPARATOR = "_";
/** The Constant PART_COMMON_LABEL. */
public static final String PART_COMMON_LABEL = "common";
/** The properties. */
private Properties properties = new Properties();
/** The url. */
private URL url;
/** The http client. */
private HttpClient httpClient;
private org.apache.http.client.HttpClient httpClient4;
/** The RESTEasy proxy */
private P proxy;
/**
* Gets the logger.
*
* @return the logger
*/
public Logger getLogger() {
return logger;
}
abstract public String getServicePathComponent();
/**
* Returns a UTF-8 encode byte array from 'string'
*
* @return UTF-8 encoded byte array
*/
protected byte[] getBytes(String string) {
byte[] result = null;
try {
result = string.getBytes("UTF8");
} catch (UnsupportedEncodingException e) {
if (logger.isWarnEnabled() == true) {
logger.warn(e.getMessage(), e);
}
}
return result;
}
/*
* Subclasses can override this method to return their AbstractCommonList subclass
*/
protected Class<CLT> getCommonListType() {
return (Class<CLT>) AbstractCommonList.class;
}
/**
* Gets the common part name.
*
* @return the common part name
*/
@Override
public String getCommonPartName() {
return getCommonPartName(getServiceName());
}
/**
* Gets the common part name.
*
* @param servicePathComponent
* the service path component
* @return the common part name
*/
protected String getCommonPartName(String commonPrefix) {
return commonPrefix + PART_LABEL_SEPARATOR + PART_COMMON_LABEL;
}
// /**
// * Gets the service path component.
// *
// * @return the service path component
// */
// abstract public String getServicePathComponent();
/**
* Instantiates a new abstract service client impl.
*/
protected AbstractServiceClientImpl() {
readProperties();
setupHttpClient();
setupHttpClient4(); // temp fix for CSPACE-6281
ResteasyProviderFactory factory = ResteasyProviderFactory.getInstance();
RegisterBuiltin.register(factory);
setProxy();
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#getProperty
* (java.lang.String)
*/
@Override
public String getProperty(String propName) {
return properties.getProperty(propName);
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#setProperty
* (java.lang.String, java.lang.String)
*/
@Override
public void setProperty(String propName, String value) {
properties.setProperty(propName, value);
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#removeProperty
* (java.lang.String)
*/
@Override
public Object removeProperty(String propName) {
return properties.remove(propName);
}
/**
* Prints the properties.
*/
public void printProperties() {
for (Object kobj : properties.keySet()) {
String key = (String) kobj;
logger.trace("begin property name=" + key + " value="
+ properties.get(key));
}
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#getBaseURL()
*/
@Override
public String getBaseURL() {
return properties.getProperty(URL_PROPERTY);
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#getHttpClient()
*/
@Override
public HttpClient getHttpClient() {
return httpClient;
}
public org.apache.http.client.HttpClient getHttpClient4() {
return httpClient4;
}
/*
* (non-Javadoc)
*
* @see org.collectionspace.services.client.CollectionSpaceClient#useAuth()
*/
@Override
public boolean useAuth() {
String auth = properties.getProperty(AUTH_PROPERTY);
return Boolean.valueOf(auth);
}
/*
* (non-Javadoc)
*
* @see org.collectionspace.services.client.CollectionSpaceClient#useSSL()
*/
@Override
public boolean useSSL() {
String ssl = properties.getProperty(SSL_PROPERTY);
return Boolean.valueOf(ssl);
}
/**
* readProperties reads properties from system class path as well as it
* overrides properties made available using command line
*
* @exception RuntimeException
*/
private void readProperties() {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
InputStream is = null;
try {
is = cl.getResourceAsStream("collectionspace-client.properties");
properties.load(is);
if (logger.isDebugEnabled()) {
printProperties();
}
String spec = System.getProperty(URL_PROPERTY);
if (spec != null && !"".equals(spec)) {
properties.setProperty(URL_PROPERTY, spec);
}
spec = properties.getProperty(URL_PROPERTY);
url = new URL(spec);
logger.debug("readProperties() using url=" + url);
String auth = System.getProperty(AUTH_PROPERTY);
if (auth != null && !"".equals(auth)) {
properties.setProperty(AUTH_PROPERTY, auth);
}
String ssl = System.getProperty(SSL_PROPERTY);
if (ssl != null && !"".equals(ssl)) {
properties.setProperty(AUTH_PROPERTY, ssl);
}
String user = System.getProperty(USER_PROPERTY);
if (user != null && !"".equals(user)) {
properties.setProperty(USER_PROPERTY, user);
}
String password = System.getProperty(PASSWORD_PROPERTY);
if (password != null && !"".equals(password)) {
properties.setProperty(PASSWORD_PROPERTY, password);
}
String tenant = System.getProperty(TENANT_PROPERTY);
if (tenant != null && !"".equals(tenant)) {
properties.setProperty(TENANT_PROPERTY, tenant);
}
if (logger.isDebugEnabled()) {
printProperties();
}
} catch (Exception e) {
logger.debug("Caught exception while reading properties", e);
throw new RuntimeException(e);
} finally {
if (is != null) {
try {
is.close();
} catch (Exception e) {
if (logger.isDebugEnabled() == true) {
e.printStackTrace();
}
}
}
}
}
/**
* setupHttpClient sets up HTTP client for the service client the setup
* process relies on the following properties URL_PROPERTY USER_PROPERTY
* PASSWORD_PROPERTY AUTH_PROPERTY SSL_PROPERTY
*/
@Override
public void setupHttpClient() {
try {
this.httpClient = new HttpClient();
if (useAuth()) {
String user = properties.getProperty(USER_PROPERTY);
String password = properties.getProperty(PASSWORD_PROPERTY);
if (logger.isDebugEnabled()) {
logger.debug("setupHttpClient() using url=" + url + " user="
+ user + " password=" + password);
}
httpClient.getState().setCredentials(
new AuthScope(url.getHost(), url.getPort(),
AuthScope.ANY_REALM),
new UsernamePasswordCredentials(user, password));
// JAXRS client library requires HTTP preemptive authentication
httpClient.getParams().setAuthenticationPreemptive(true);
if (logger.isDebugEnabled()) {
logger.debug("setupHttpClient: set preemptive authentication");
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("setupHttpClient() : no auth mode!");
}
}
} catch (Throwable e) {
e.printStackTrace();
}
}
/*
* This is a temp fix for RESTEasy upgrade in CSPACE-6281. The long-term solution will be to use
* the non-deprecated approach per the RESTEasy documentation.
*/
public void setupHttpClient4() {
try {
this.httpClient4 = new DefaultHttpClient();
if (useAuth()) {
String user = properties.getProperty(USER_PROPERTY);
String password = properties.getProperty(PASSWORD_PROPERTY);
if (logger.isDebugEnabled()) {
logger.debug("setupHttpClient() using url=" + url + " user="
+ user + " password=" + password);
}
httpClient.getState().setCredentials(
new AuthScope(url.getHost(), url.getPort(),
AuthScope.ANY_REALM),
new UsernamePasswordCredentials(user, password));
// JAXRS client library requires HTTP preemptive authentication
httpClient.getParams().setAuthenticationPreemptive(true);
if (logger.isDebugEnabled()) {
logger.debug("setupHttpClient: set preemptive authentication");
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("setupHttpClient() : no auth mode!");
}
}
} catch (Throwable e) {
e.printStackTrace();
}
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#isServerSecure
* ()
*/
@Override
public boolean isServerSecure() {
return Boolean.getBoolean("cspace.server.secure");
}
@Override
public P getProxy() {
return proxy;
}
/**
* allow to reset proxy as per security needs
*/
@Override
public void setProxy() {
ResteasyClient client = null;
String urlString = url.toString();
Class<P> proxyClass = this.getProxyClass();
if (useAuth()) {
String user = properties.getProperty(USER_PROPERTY);
String password = properties.getProperty(PASSWORD_PROPERTY);
client = (ResteasyClient)ClientBuilder.newClient().register(new Authenticator(user, password));
} else {
client = (ResteasyClient)ClientBuilder.newClient();
}
proxy = client.target(urlString).proxy(proxyClass);
}
/**
* allow to reset proxy as per security needs
*/
@Deprecated
public void _setProxy() {
Class<P> proxyClass = this.getProxyClass();
if (useAuth()) {
proxy = ProxyFactory.create(proxyClass,
getBaseURL(), new ApacheHttpClient4Executor(getHttpClient4()));
} else {
proxy = ProxyFactory.create(proxyClass,
getBaseURL());
}
}
@Override
public void setAuth(boolean useAuth,
String user, boolean useUser,
String password, boolean usePassword) {
if (useAuth == true) {
setProperty(CollectionSpaceClient.AUTH_PROPERTY, "true");
if (useUser) {
setProperty(CollectionSpaceClient.USER_PROPERTY,
user);
} else {
removeProperty(CollectionSpaceClient.USER_PROPERTY);
}
if (usePassword) {
setProperty(CollectionSpaceClient.PASSWORD_PROPERTY,
password);
} else {
removeProperty(CollectionSpaceClient.PASSWORD_PROPERTY);
}
} else {
removeProperty(CollectionSpaceClient.AUTH_PROPERTY);
}
setupHttpClient();
setupHttpClient(); // temp fix for CSPACE-6281
setProxy();
}
/*
*
* Common Proxied service calls
*
*/
/* (non-Javadoc)
* @see org.collectionspace.services.client.AbstractServiceClientImpl#delete(java.lang.String)
*/
@Override
public Response delete(String csid) {
return getProxy().delete(csid);
}
/**
* @param csid
* @return
* @see org.collectionspace.services.client.BlobProxy#getAuthorityRefs(java.lang.String)
*/
@Override
public Response getAuthorityRefs(String csid) { // Response.getEntity returns AuthorityRefList type
return getProxy().getAuthorityRefs(csid);
}
@Override
public Response getWorkflow(String csid) {
return getProxy().getWorkflow(csid);
}
@Override
public Response updateWorkflowWithTransition(String csid, String workflowTransition) {
return getProxy().updateWorkflowWithTransition(csid, workflowTransition);
}
/*
* Because of how RESTEasy creates proxy classes, sub-interfaces will need to override
* these methods with their specific "common" list return types. Otherwise, only the info
* in the AbstractCommonList type will be returned to the callers
*/
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#readList(java
* .lang.String, java.lang.String)
*/
@Override
public Response readList(Long pageSize,
Long pageNumber) {
return getProxy().readList(pageSize, pageNumber);
}
/*
* (non-Javadoc)
*
* @see
* org.collectionspace.services.client.CollectionSpaceClient#readList(java
* .lang.String, java.lang.String)
*/
@Override
public Response readList(String sortBy, Long pageSize,
Long pageNumber) {
return getProxy().readList(sortBy, pageSize, pageNumber);
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kubernetes.producer;
import java.util.Map;
import io.fabric8.kubernetes.api.model.DoneableService;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceBuilder;
import io.fabric8.kubernetes.api.model.ServiceList;
import io.fabric8.kubernetes.api.model.ServiceSpec;
import io.fabric8.kubernetes.client.dsl.ClientMixedOperation;
import io.fabric8.kubernetes.client.dsl.ClientNonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.ClientResource;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesEndpoint;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KubernetesServicesProducer extends DefaultProducer {
private static final Logger LOG = LoggerFactory
.getLogger(KubernetesServicesProducer.class);
public KubernetesServicesProducer(KubernetesEndpoint endpoint) {
super(endpoint);
}
@Override
public KubernetesEndpoint getEndpoint() {
return (KubernetesEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) throws Exception {
String operation;
if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration()
.getOperation())) {
operation = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_OPERATION, String.class);
} else {
operation = getEndpoint().getKubernetesConfiguration()
.getOperation();
}
switch (operation) {
case KubernetesOperations.LIST_SERVICES_OPERATION:
doList(exchange, operation);
break;
case KubernetesOperations.LIST_SERVICES_BY_LABELS_OPERATION:
doListServiceByLabels(exchange, operation);
break;
case KubernetesOperations.GET_SERVICE_OPERATION:
doGetService(exchange, operation);
break;
case KubernetesOperations.CREATE_SERVICE_OPERATION:
doCreateService(exchange, operation);
break;
case KubernetesOperations.DELETE_SERVICE_OPERATION:
doDeleteService(exchange, operation);
break;
default:
throw new IllegalArgumentException("Unsupported operation "
+ operation);
}
}
protected void doList(Exchange exchange, String operation) throws Exception {
ServiceList servicesList = null;
String namespaceName = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (!ObjectHelper.isEmpty(namespaceName)) {
servicesList = getEndpoint().getKubernetesClient().services()
.inNamespace(namespaceName).list();
} else {
servicesList = getEndpoint().getKubernetesClient().services()
.list();
}
exchange.getOut().setBody(servicesList.getItems());
}
protected void doListServiceByLabels(Exchange exchange, String operation)
throws Exception {
ServiceList servicesList = null;
Map<String, String> labels = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_SERVICE_LABELS, Map.class);
String namespaceName = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (!ObjectHelper.isEmpty(namespaceName)) {
ClientNonNamespaceOperation<Service, ServiceList, DoneableService, ClientResource<Service, DoneableService>> services;
services = getEndpoint().getKubernetesClient().services()
.inNamespace(namespaceName);
for (Map.Entry<String, String> entry : labels.entrySet()) {
services.withLabel(entry.getKey(), entry.getValue());
}
servicesList = services.list();
} else {
ClientMixedOperation<Service, ServiceList, DoneableService, ClientResource<Service, DoneableService>> services;
services = getEndpoint().getKubernetesClient().services();
for (Map.Entry<String, String> entry : labels.entrySet()) {
services.withLabel(entry.getKey(), entry.getValue());
}
servicesList = services.list();
}
exchange.getOut().setBody(servicesList.getItems());
}
protected void doGetService(Exchange exchange, String operation)
throws Exception {
Service service = null;
String serviceName = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_SERVICE_NAME, String.class);
String namespaceName = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (ObjectHelper.isEmpty(serviceName)) {
LOG.error("Get a specific service require specify a service name");
throw new IllegalArgumentException(
"Get a specific service require specify a service name");
}
if (ObjectHelper.isEmpty(namespaceName)) {
LOG.error("Get a specific service require specify a namespace name");
throw new IllegalArgumentException(
"Get a specific service require specify a namespace name");
}
service = getEndpoint().getKubernetesClient().services()
.inNamespace(namespaceName).withName(serviceName).get();
exchange.getOut().setBody(service);
}
protected void doCreateService(Exchange exchange, String operation)
throws Exception {
Service service = null;
String serviceName = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_SERVICE_NAME, String.class);
String namespaceName = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
ServiceSpec serviceSpec = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_SERVICE_SPEC, ServiceSpec.class);
if (ObjectHelper.isEmpty(serviceName)) {
LOG.error("Create a specific service require specify a service name");
throw new IllegalArgumentException(
"Create a specific service require specify a service name");
}
if (ObjectHelper.isEmpty(namespaceName)) {
LOG.error("Create a specific service require specify a namespace name");
throw new IllegalArgumentException(
"Create a specific service require specify a namespace name");
}
if (ObjectHelper.isEmpty(serviceSpec)) {
LOG.error("Create a specific service require specify a service spec bean");
throw new IllegalArgumentException(
"Create a specific service require specify a service spec bean");
}
Map<String, String> labels = exchange.getIn().getHeader(
KubernetesConstants.KUBERNETES_SERVICE_LABELS, Map.class);
Service serviceCreating = new ServiceBuilder()
.withNewMetadata().withName(serviceName).withLabels(labels)
.endMetadata().withSpec(serviceSpec).build();
service = getEndpoint().getKubernetesClient().services()
.inNamespace(namespaceName).create(serviceCreating);
exchange.getOut().setBody(service);
}
/**
 * Deletes the named Kubernetes Service from the given namespace.
 * Both the service name and namespace name headers are mandatory.
 * The boolean outcome of the delete call is set as the OUT body.
 *
 * @param exchange the Camel exchange carrying the request headers
 * @param operation the producer operation name (unused here)
 * @throws Exception if the Kubernetes client call fails
 */
protected void doDeleteService(Exchange exchange, String operation)
        throws Exception {
    String name = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_SERVICE_NAME, String.class);
    String nsName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
    if (ObjectHelper.isEmpty(name)) {
        LOG.error("Delete a specific service require specify a service name");
        throw new IllegalArgumentException(
                "Delete a specific service require specify a service name");
    }
    if (ObjectHelper.isEmpty(nsName)) {
        LOG.error("Delete a specific service require specify a namespace name");
        throw new IllegalArgumentException(
                "Delete a specific service require specify a namespace name");
    }
    // Report whether the deletion actually happened back to the caller.
    boolean deleted = getEndpoint().getKubernetesClient().services()
            .inNamespace(nsName).withName(name).delete();
    exchange.getOut().setBody(deleted);
}
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/iot/v1/resources.proto
package com.google.cloud.iot.v1;
/**
*
*
* <pre>
* The device configuration. Eventually delivered to devices.
* </pre>
*
* Protobuf type {@code google.cloud.iot.v1.DeviceConfig}
*/
public final class DeviceConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.iot.v1.DeviceConfig)
DeviceConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeviceConfig.newBuilder() to construct.
private DeviceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeviceConfig() {
binaryData_ = com.google.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeviceConfig();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
/**
 * Parses a serialized {@code DeviceConfig} from the given input stream.
 * Recognized fields are stored directly; unrecognized fields are preserved
 * in {@code unknownFields} so they round-trip on re-serialization.
 *
 * @param input the wire-format stream to read
 * @param extensionRegistry registry for extensions (must be non-null)
 * @throws com.google.protobuf.InvalidProtocolBufferException on malformed input
 */
private DeviceConfig(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      // Each tag encodes (field_number << 3) | wire_type.
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of stream.
          done = true;
          break;
        case 8:
          // Field 1 (version), wire type 0 (varint).
          {
            version_ = input.readInt64();
            break;
          }
        case 18:
          // Field 2 (cloud_update_time), wire type 2 (length-delimited message).
          {
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (cloudUpdateTime_ != null) {
              // Field seen before: merge per protobuf repeated-occurrence semantics.
              subBuilder = cloudUpdateTime_.toBuilder();
            }
            cloudUpdateTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(cloudUpdateTime_);
              cloudUpdateTime_ = subBuilder.buildPartial();
            }
            break;
          }
        case 26:
          // Field 3 (device_ack_time), wire type 2 (length-delimited message).
          {
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (deviceAckTime_ != null) {
              subBuilder = deviceAckTime_.toBuilder();
            }
            deviceAckTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(deviceAckTime_);
              deviceAckTime_ = subBuilder.buildPartial();
            }
            break;
          }
        case 34:
          // Field 4 (binary_data), wire type 2 (length-delimited bytes).
          {
            binaryData_ = input.readBytes();
            break;
          }
        default:
          // Unknown tag: preserve it, or stop if it cannot be parsed.
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially parsed message so callers can inspect it.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Runs even on failure so the unfinished message is self-consistent.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.iot.v1.ResourcesProto
.internal_static_google_cloud_iot_v1_DeviceConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.iot.v1.ResourcesProto
.internal_static_google_cloud_iot_v1_DeviceConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.iot.v1.DeviceConfig.class,
com.google.cloud.iot.v1.DeviceConfig.Builder.class);
}
public static final int VERSION_FIELD_NUMBER = 1;
private long version_;
/**
*
*
* <pre>
* [Output only] The version of this update. The version number is assigned by
* the server, and is always greater than 0 after device creation. The
* version must be 0 on the `CreateDevice` request if a `config` is
* specified; the response of `CreateDevice` will always have a value of 1.
* </pre>
*
* <code>int64 version = 1;</code>
*
* @return The version.
*/
@java.lang.Override
public long getVersion() {
return version_;
}
public static final int CLOUD_UPDATE_TIME_FIELD_NUMBER = 2;
private com.google.protobuf.Timestamp cloudUpdateTime_;
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*
* @return Whether the cloudUpdateTime field is set.
*/
@java.lang.Override
public boolean hasCloudUpdateTime() {
return cloudUpdateTime_ != null;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*
* @return The cloudUpdateTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCloudUpdateTime() {
return cloudUpdateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: cloudUpdateTime_;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCloudUpdateTimeOrBuilder() {
return getCloudUpdateTime();
}
public static final int DEVICE_ACK_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp deviceAckTime_;
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*
* @return Whether the deviceAckTime field is set.
*/
@java.lang.Override
public boolean hasDeviceAckTime() {
return deviceAckTime_ != null;
}
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*
* @return The deviceAckTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getDeviceAckTime() {
return deviceAckTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: deviceAckTime_;
}
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getDeviceAckTimeOrBuilder() {
return getDeviceAckTime();
}
public static final int BINARY_DATA_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString binaryData_;
/**
*
*
* <pre>
* The device configuration data.
* </pre>
*
* <code>bytes binary_data = 4;</code>
*
* @return The binaryData.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBinaryData() {
return binaryData_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  // Memoized tri-state: -1 = not yet computed, 0 = known invalid, 1 = known valid.
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message to {@code output} in protobuf wire format.
 * Fields holding their default value are omitted, per proto3 semantics.
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (version_ != 0L) {
    output.writeInt64(1, version_); // field 1: int64 version
  }
  if (cloudUpdateTime_ != null) {
    output.writeMessage(2, getCloudUpdateTime()); // field 2: Timestamp
  }
  if (deviceAckTime_ != null) {
    output.writeMessage(3, getDeviceAckTime()); // field 3: Timestamp
  }
  if (!binaryData_.isEmpty()) {
    output.writeBytes(4, binaryData_); // field 4: bytes
  }
  // Round-trip any fields captured at parse time that this version doesn't know.
  unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes) the exact number of bytes {@link #writeTo} will emit.
 * Must mirror writeTo's field-skipping logic exactly.
 */
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize == -1 means "not yet computed"; messages are immutable, so
  // the cached value never goes stale.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (version_ != 0L) {
    size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, version_);
  }
  if (cloudUpdateTime_ != null) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCloudUpdateTime());
  }
  if (deviceAckTime_ != null) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDeviceAckTime());
  }
  if (!binaryData_.isEmpty()) {
    size += com.google.protobuf.CodedOutputStream.computeBytesSize(4, binaryData_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Structural equality: same version, same presence and contents of both
 * timestamp fields, same binary payload, and identical unknown fields.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.iot.v1.DeviceConfig)) {
    // Defer to the superclass for non-DeviceConfig arguments.
    return super.equals(obj);
  }
  com.google.cloud.iot.v1.DeviceConfig that = (com.google.cloud.iot.v1.DeviceConfig) obj;
  // Scalar field.
  if (getVersion() != that.getVersion()) {
    return false;
  }
  // Message fields: presence must match first, then contents when present.
  if (hasCloudUpdateTime() != that.hasCloudUpdateTime()) {
    return false;
  }
  if (hasCloudUpdateTime() && !getCloudUpdateTime().equals(that.getCloudUpdateTime())) {
    return false;
  }
  if (hasDeviceAckTime() != that.hasDeviceAckTime()) {
    return false;
  }
  if (hasDeviceAckTime() && !getDeviceAckTime().equals(that.getDeviceAckTime())) {
    return false;
  }
  return getBinaryData().equals(that.getBinaryData())
      && unknownFields.equals(that.unknownFields);
}
/**
 * Hash code consistent with {@link #equals}; memoized because the message
 * is immutable.
 */
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // protoc's standard mixing scheme: fold in each set field's number and value.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + VERSION_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getVersion());
  if (hasCloudUpdateTime()) {
    hash = (37 * hash) + CLOUD_UPDATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getCloudUpdateTime().hashCode();
  }
  if (hasDeviceAckTime()) {
    hash = (37 * hash) + DEVICE_ACK_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getDeviceAckTime().hashCode();
  }
  hash = (37 * hash) + BINARY_DATA_FIELD_NUMBER;
  hash = (53 * hash) + getBinaryData().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.iot.v1.DeviceConfig parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.iot.v1.DeviceConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.iot.v1.DeviceConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.iot.v1.DeviceConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The device configuration. Eventually delivered to devices.
* </pre>
*
* Protobuf type {@code google.cloud.iot.v1.DeviceConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.iot.v1.DeviceConfig)
com.google.cloud.iot.v1.DeviceConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.iot.v1.ResourcesProto
.internal_static_google_cloud_iot_v1_DeviceConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.iot.v1.ResourcesProto
.internal_static_google_cloud_iot_v1_DeviceConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.iot.v1.DeviceConfig.class,
com.google.cloud.iot.v1.DeviceConfig.Builder.class);
}
// Construct using com.google.cloud.iot.v1.DeviceConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
/**
 * Resets every field of this builder to its proto3 default value.
 *
 * @return this builder, for chaining
 */
@java.lang.Override
public Builder clear() {
  super.clear();
  version_ = 0L;
  // For message fields, both the plain value and the nested builder (when one
  // was materialized) must be dropped to fully clear the field.
  if (cloudUpdateTimeBuilder_ == null) {
    cloudUpdateTime_ = null;
  } else {
    cloudUpdateTime_ = null;
    cloudUpdateTimeBuilder_ = null;
  }
  if (deviceAckTimeBuilder_ == null) {
    deviceAckTime_ = null;
  } else {
    deviceAckTime_ = null;
    deviceAckTimeBuilder_ = null;
  }
  binaryData_ = com.google.protobuf.ByteString.EMPTY;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.iot.v1.ResourcesProto
.internal_static_google_cloud_iot_v1_DeviceConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.iot.v1.DeviceConfig getDefaultInstanceForType() {
return com.google.cloud.iot.v1.DeviceConfig.getDefaultInstance();
}
/**
 * Builds the message, rejecting an uninitialized result. Since this message
 * type has no required fields, the exception path is effectively unreachable
 * and exists to satisfy the generic Message.Builder contract.
 */
@java.lang.Override
public com.google.cloud.iot.v1.DeviceConfig build() {
  com.google.cloud.iot.v1.DeviceConfig result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
/**
 * Copies the builder's current state into a new immutable message without
 * checking required-field initialization.
 */
@java.lang.Override
public com.google.cloud.iot.v1.DeviceConfig buildPartial() {
  com.google.cloud.iot.v1.DeviceConfig result = new com.google.cloud.iot.v1.DeviceConfig(this);
  result.version_ = version_;
  // Each message field comes from its nested builder when one was
  // materialized, otherwise from the plain field.
  if (cloudUpdateTimeBuilder_ == null) {
    result.cloudUpdateTime_ = cloudUpdateTime_;
  } else {
    result.cloudUpdateTime_ = cloudUpdateTimeBuilder_.build();
  }
  if (deviceAckTimeBuilder_ == null) {
    result.deviceAckTime_ = deviceAckTime_;
  } else {
    result.deviceAckTime_ = deviceAckTimeBuilder_.build();
  }
  result.binaryData_ = binaryData_;
  onBuilt();
  return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
/**
 * Merges any {@link com.google.protobuf.Message} into this builder.
 * Uses the typed fast path when the argument is a DeviceConfig; otherwise
 * falls back to the superclass's reflective field-by-field merge.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.iot.v1.DeviceConfig) {
    return mergeFrom((com.google.cloud.iot.v1.DeviceConfig) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
/**
 * Merges {@code other} into this builder using protobuf merge semantics:
 * only fields of {@code other} that differ from their default are applied,
 * and message fields are merged recursively rather than replaced.
 */
public Builder mergeFrom(com.google.cloud.iot.v1.DeviceConfig other) {
  // Merging the default instance is a no-op.
  if (other == com.google.cloud.iot.v1.DeviceConfig.getDefaultInstance()) return this;
  if (other.getVersion() != 0L) {
    setVersion(other.getVersion());
  }
  if (other.hasCloudUpdateTime()) {
    mergeCloudUpdateTime(other.getCloudUpdateTime());
  }
  if (other.hasDeviceAckTime()) {
    mergeDeviceAckTime(other.getDeviceAckTime());
  }
  // Reference comparison is intentional: any non-default (non-EMPTY) bytes win.
  if (other.getBinaryData() != com.google.protobuf.ByteString.EMPTY) {
    setBinaryData(other.getBinaryData());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/**
 * Parses a DeviceConfig from the stream and merges it into this builder.
 * On a parse failure, whatever was successfully parsed is still merged in
 * (via the finally block) before the exception propagates.
 */
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.iot.v1.DeviceConfig parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Recover the partial message so the finally block can merge it.
    parsedMessage = (com.google.cloud.iot.v1.DeviceConfig) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
private long version_;
/**
*
*
* <pre>
* [Output only] The version of this update. The version number is assigned by
* the server, and is always greater than 0 after device creation. The
* version must be 0 on the `CreateDevice` request if a `config` is
* specified; the response of `CreateDevice` will always have a value of 1.
* </pre>
*
* <code>int64 version = 1;</code>
*
* @return The version.
*/
@java.lang.Override
public long getVersion() {
return version_;
}
/**
*
*
* <pre>
* [Output only] The version of this update. The version number is assigned by
* the server, and is always greater than 0 after device creation. The
* version must be 0 on the `CreateDevice` request if a `config` is
* specified; the response of `CreateDevice` will always have a value of 1.
* </pre>
*
* <code>int64 version = 1;</code>
*
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(long value) {
version_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* [Output only] The version of this update. The version number is assigned by
* the server, and is always greater than 0 after device creation. The
* version must be 0 on the `CreateDevice` request if a `config` is
* specified; the response of `CreateDevice` will always have a value of 1.
* </pre>
*
* <code>int64 version = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearVersion() {
version_ = 0L;
onChanged();
return this;
}
private com.google.protobuf.Timestamp cloudUpdateTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
cloudUpdateTimeBuilder_;
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*
* @return Whether the cloudUpdateTime field is set.
*/
public boolean hasCloudUpdateTime() {
return cloudUpdateTimeBuilder_ != null || cloudUpdateTime_ != null;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*
* @return The cloudUpdateTime.
*/
public com.google.protobuf.Timestamp getCloudUpdateTime() {
if (cloudUpdateTimeBuilder_ == null) {
return cloudUpdateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: cloudUpdateTime_;
} else {
return cloudUpdateTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
public Builder setCloudUpdateTime(com.google.protobuf.Timestamp value) {
if (cloudUpdateTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
cloudUpdateTime_ = value;
onChanged();
} else {
cloudUpdateTimeBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
public Builder setCloudUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (cloudUpdateTimeBuilder_ == null) {
cloudUpdateTime_ = builderForValue.build();
onChanged();
} else {
cloudUpdateTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
/**
 * Merges {@code value} into the cloud_update_time field. If the field is
 * already set, the two Timestamps are merged per protobuf semantics
 * (later sub-fields win); otherwise {@code value} is taken as-is.
 *
 * <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
 */
public Builder mergeCloudUpdateTime(com.google.protobuf.Timestamp value) {
  if (cloudUpdateTimeBuilder_ == null) {
    if (cloudUpdateTime_ != null) {
      cloudUpdateTime_ =
          com.google.protobuf.Timestamp.newBuilder(cloudUpdateTime_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      cloudUpdateTime_ = value;
    }
    onChanged();
  } else {
    // A nested builder owns the field; delegate the merge to it.
    cloudUpdateTimeBuilder_.mergeFrom(value);
  }
  return this;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
public Builder clearCloudUpdateTime() {
if (cloudUpdateTimeBuilder_ == null) {
cloudUpdateTime_ = null;
onChanged();
} else {
cloudUpdateTime_ = null;
cloudUpdateTimeBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
public com.google.protobuf.Timestamp.Builder getCloudUpdateTimeBuilder() {
onChanged();
return getCloudUpdateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
public com.google.protobuf.TimestampOrBuilder getCloudUpdateTimeOrBuilder() {
if (cloudUpdateTimeBuilder_ != null) {
return cloudUpdateTimeBuilder_.getMessageOrBuilder();
} else {
return cloudUpdateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: cloudUpdateTime_;
}
}
/**
*
*
* <pre>
* [Output only] The time at which this configuration version was updated in
* Cloud IoT Core. This timestamp is set by the server.
* </pre>
*
* <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
*/
/**
 * Lazily creates the nested single-field builder for cloud_update_time.
 * Once created, the builder owns the field's state, so the plain field is
 * nulled out to avoid two competing sources of truth.
 *
 * <code>.google.protobuf.Timestamp cloud_update_time = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Timestamp,
        com.google.protobuf.Timestamp.Builder,
        com.google.protobuf.TimestampOrBuilder>
    getCloudUpdateTimeFieldBuilder() {
  if (cloudUpdateTimeBuilder_ == null) {
    cloudUpdateTimeBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>(
            getCloudUpdateTime(), getParentForChildren(), isClean());
    cloudUpdateTime_ = null; // builder now owns the value
  }
  return cloudUpdateTimeBuilder_;
}
private com.google.protobuf.Timestamp deviceAckTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
deviceAckTimeBuilder_;
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*
* @return Whether the deviceAckTime field is set.
*/
public boolean hasDeviceAckTime() {
return deviceAckTimeBuilder_ != null || deviceAckTime_ != null;
}
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*
* @return The deviceAckTime.
*/
public com.google.protobuf.Timestamp getDeviceAckTime() {
if (deviceAckTimeBuilder_ == null) {
return deviceAckTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: deviceAckTime_;
} else {
return deviceAckTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*/
public Builder setDeviceAckTime(com.google.protobuf.Timestamp value) {
if (deviceAckTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
deviceAckTime_ = value;
onChanged();
} else {
deviceAckTimeBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* [Output only] The time at which Cloud IoT Core received the
* acknowledgment from the device, indicating that the device has received
* this configuration version. If this field is not present, the device has
* not yet acknowledged that it received this version. Note that when
* the config was sent to the device, many config versions may have been
* available in Cloud IoT Core while the device was disconnected, and on
* connection, only the latest version is sent to the device. Some
* versions may never be sent to the device, and therefore are never
* acknowledged. This timestamp is set by Cloud IoT Core.
* </pre>
*
* <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
*/
public Builder setDeviceAckTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (deviceAckTimeBuilder_ == null) {
deviceAckTime_ = builderForValue.build();
onChanged();
} else {
deviceAckTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * Merges {@code value} into {@code device_ack_time}, following protobuf
 * singular-message merge semantics: if the field is unset the value is
 * adopted as-is, otherwise the two messages are merged field-by-field.
 *
 * <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
 */
public Builder mergeDeviceAckTime(com.google.protobuf.Timestamp value) {
  if (deviceAckTimeBuilder_ != null) {
    deviceAckTimeBuilder_.mergeFrom(value);
    return this;
  }
  deviceAckTime_ =
      (deviceAckTime_ == null)
          ? value
          : com.google.protobuf.Timestamp.newBuilder(deviceAckTime_)
              .mergeFrom(value)
              .buildPartial();
  onChanged();
  return this;
}
/**
 * Clears {@code device_ack_time}, discarding any nested builder as well.
 *
 * <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
 */
public Builder clearDeviceAckTime() {
  deviceAckTime_ = null;
  if (deviceAckTimeBuilder_ == null) {
    // Only the plain-field path notifies listeners, mirroring generated code.
    onChanged();
  } else {
    deviceAckTimeBuilder_ = null;
  }
  return this;
}
/**
 * Returns a mutable builder for {@code device_ack_time}, creating the lazy
 * field builder on first use.
 *
 * <pre>
 * [Output only] The time at which Cloud IoT Core received the
 * acknowledgment from the device, indicating that the device has received
 * this configuration version. If this field is not present, the device has
 * not yet acknowledged that it received this version. This timestamp is set
 * by Cloud IoT Core.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
 */
public com.google.protobuf.Timestamp.Builder getDeviceAckTimeBuilder() {
  // onChanged() is deliberately called before the field builder is (possibly)
  // created below; the returned builder is assumed mutable by the caller.
  onChanged();
  return getDeviceAckTimeFieldBuilder().getBuilder();
}
/**
 * Read-only view of {@code device_ack_time}: the live nested builder if one
 * exists, the stored message otherwise, or the default instance when unset.
 *
 * <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
 */
public com.google.protobuf.TimestampOrBuilder getDeviceAckTimeOrBuilder() {
  if (deviceAckTimeBuilder_ != null) {
    return deviceAckTimeBuilder_.getMessageOrBuilder();
  }
  if (deviceAckTime_ != null) {
    return deviceAckTime_;
  }
  return com.google.protobuf.Timestamp.getDefaultInstance();
}
/**
 * Lazily creates the single-field builder for {@code device_ack_time}.
 * Once the builder exists it owns the field value, so the plain
 * {@code deviceAckTime_} reference is cleared after hand-off.
 *
 * <code>.google.protobuf.Timestamp device_ack_time = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Timestamp,
        com.google.protobuf.Timestamp.Builder,
        com.google.protobuf.TimestampOrBuilder>
    getDeviceAckTimeFieldBuilder() {
  if (deviceAckTimeBuilder_ == null) {
    // getDeviceAckTime() is read BEFORE deviceAckTime_ is nulled out: the
    // current value (or default) seeds the new builder.
    deviceAckTimeBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>(
            getDeviceAckTime(), getParentForChildren(), isClean());
    deviceAckTime_ = null;
  }
  return deviceAckTimeBuilder_;
}
// Backing store for the `bytes binary_data = 4` field; never null (EMPTY when unset).
private com.google.protobuf.ByteString binaryData_ = com.google.protobuf.ByteString.EMPTY;
/**
 *
 *
 * <pre>
 * The device configuration data.
 * </pre>
 *
 * <code>bytes binary_data = 4;</code>
 *
 * @return The binaryData.
 */
@java.lang.Override
public com.google.protobuf.ByteString getBinaryData() {
  return binaryData_;
}
/**
 * Sets the device configuration payload.
 *
 * <pre>
 * The device configuration data.
 * </pre>
 *
 * <code>bytes binary_data = 4;</code>
 *
 * @param value The binaryData to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setBinaryData(com.google.protobuf.ByteString value) {
  // Same messageless NullPointerException as the explicit null check.
  java.util.Objects.requireNonNull(value);
  binaryData_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The device configuration data.
 * </pre>
 *
 * <code>bytes binary_data = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearBinaryData() {
  // Resets to the message default (ByteString taken from the default instance).
  binaryData_ = getDefaultInstance().getBinaryData();
  onChanged();
  return this;
}
// Standard generated pass-throughs for unknown (unrecognized) wire fields.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.iot.v1.DeviceConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeviceConfig)
// Shared immutable default instance; all unset message fields resolve to it.
private static final com.google.cloud.iot.v1.DeviceConfig DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.iot.v1.DeviceConfig();
}
public static com.google.cloud.iot.v1.DeviceConfig getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. NOTE(review): delegating parsePartialFrom to the
// stream-consuming constructor is the older protoc codegen pattern; newer
// generators parse via builders instead — regenerate rather than hand-edit.
private static final com.google.protobuf.Parser<DeviceConfig> PARSER =
    new com.google.protobuf.AbstractParser<DeviceConfig>() {
      @java.lang.Override
      public DeviceConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeviceConfig(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<DeviceConfig> parser() {
  return PARSER;
}
// Instance-level accessors required by the Message contract; both return
// the class-wide singletons.
@java.lang.Override
public com.google.protobuf.Parser<DeviceConfig> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.iot.v1.DeviceConfig getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.find.impl.livePreview;
import com.intellij.find.*;
import com.intellij.find.impl.FindResultImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.SelectionEvent;
import com.intellij.openapi.editor.event.SelectionListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.util.Alarm;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
/**
 * Drives the live search/replace preview for an editor: listens to document and
 * selection changes, schedules background search updates (debounced via an
 * {@link Alarm}), and performs single/bulk replacements.
 *
 * <p>Lifecycle: {@link #on()} installs a {@link LivePreview} and starts tracking
 * the document; {@link #off()} stops tracking; {@link #dispose()} tears the
 * controller down permanently.
 */
public class LivePreviewController implements LivePreview.Delegate, FindUtil.ReplaceDelegate {
  private static final Logger LOG =
    Logger.getInstance("#com.intellij.find.impl.livePreview.LivePreviewController");

  /** Default debounce delay (ms) between user activity and the background update. */
  public static final int USER_ACTIVITY_TRIGGERING_DELAY = 30;
  /** Upper bound on the number of matches collected for highlighting. */
  public static final int MATCHES_LIMIT = 10000;

  protected EditorSearchComponent myComponent;

  private int myUserActivityDelay = USER_ACTIVITY_TRIGGERING_DELAY;

  private final Alarm myLivePreviewAlarm = new Alarm(Alarm.ThreadToUse.SHARED_THREAD);
  protected SearchResults mySearchResults;
  private LivePreview myLivePreview;
  private final boolean myReplaceDenied = false;
  // Set while a replace initiated by this controller is in flight, so that the
  // resulting document-change event does not schedule a redundant update.
  private boolean mySuppressUpdate = false;

  private boolean myTrackingDocument;
  // Remembers that the document changed while tracking was off; stale results
  // are cleared the next time the preview is turned on.
  private boolean myChanged;

  private boolean myListeningSelection = false;

  private final SelectionListener mySelectionListener = new SelectionListener() {
    @Override
    public void selectionChanged(SelectionEvent e) {
      smartUpdate();
    }
  };

  private boolean myDisposed;

  /** Starts or stops listening to editor selection changes (idempotent). */
  public void setTrackingSelection(boolean b) {
    if (b) {
      if (!myListeningSelection) {
        getEditor().getSelectionModel().addSelectionListener(mySelectionListener);
      }
    }
    else {
      if (myListeningSelection) {
        getEditor().getSelectionModel().removeSelectionListener(mySelectionListener);
      }
    }
    myListeningSelection = b;
  }

  private final DocumentAdapter myDocumentListener = new DocumentAdapter() {
    @Override
    public void documentChanged(final DocumentEvent e) {
      if (!myTrackingDocument) {
        myChanged = true; // rebuild results when tracking resumes
        return;
      }
      if (!mySuppressUpdate) {
        smartUpdate();
      }
      else {
        mySuppressUpdate = false; // consume the change caused by our own replace
      }
    }
  };

  /** Marks the preview for an in-place ("smart") refresh and schedules an update. */
  private void smartUpdate() {
    // FIX: myLivePreview may be null if a selection/document event arrives while
    // no preview is installed (e.g. tracking enabled before on() was called);
    // previously this threw an NPE.
    if (myLivePreview != null) {
      myLivePreview.inSmartUpdate();
    }
    updateInBackground(mySearchResults.getFindModel(), false);
  }

  /** Moves the result cursor to the previous or next occurrence. */
  public void moveCursor(SearchResults.Direction direction) {
    if (direction == SearchResults.Direction.UP) {
      mySearchResults.prevOccurrence(false);
    }
    else {
      mySearchResults.nextOccurrence(false);
    }
  }

  public boolean isReplaceDenied() {
    return myReplaceDenied;
  }

  public LivePreviewController(SearchResults searchResults, @Nullable EditorSearchComponent component) {
    mySearchResults = searchResults;
    myComponent = component;
    getEditor().getDocument().addDocumentListener(myDocumentListener);
  }

  public int getUserActivityDelay() {
    return myUserActivityDelay;
  }

  public void setUserActivityDelay(int userActivityDelay) {
    myUserActivityDelay = userActivityDelay;
  }

  /**
   * Schedules a background search for {@code findModel}. Pending requests are
   * cancelled first; in unit-test mode the request runs synchronously.
   *
   * @param findModel model to search with; {@code null} is a no-op
   * @param allowedToChangedEditorSelection whether the update may move the selection
   */
  public void updateInBackground(FindModel findModel, final boolean allowedToChangedEditorSelection) {
    final int stamp = mySearchResults.getStamp();
    myLivePreviewAlarm.cancelAllRequests();
    if (findModel == null) return;
    final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
    // Copy the model so later edits to the live model don't affect this request.
    final FindModel copy = new FindModel();
    copy.copyFrom(findModel);
    Runnable request = new Runnable() {
      @Override
      public void run() {
        if (myDisposed) return;
        Project project = mySearchResults.getProject();
        if (project != null && project.isDisposed()) return;
        mySearchResults.updateThreadSafe(copy, allowedToChangedEditorSelection, null, stamp);
      }
    };
    if (unitTestMode) {
      request.run();
    }
    else {
      myLivePreviewAlarm.addRequest(request, myUserActivityDelay);
    }
  }

  /**
   * Computes the replacement string for {@code findResult} (expanding regex
   * groups etc.), or {@code null} if there is nothing to replace or the
   * replacement string is malformed.
   */
  @Override
  public String getStringToReplace(@NotNull Editor editor, @Nullable FindResult findResult) {
    if (findResult == null) {
      return null;
    }
    String foundString = editor.getDocument().getText(findResult);
    String documentText = editor.getDocument().getText();
    FindModel currentModel = mySearchResults.getFindModel();
    String stringToReplace = null;
    if (currentModel != null) {
      if (currentModel.isReplaceState()) {
        FindManager findManager = FindManager.getInstance(editor.getProject());
        try {
          stringToReplace = findManager.getStringToReplace(foundString, currentModel,
                                                           findResult.getStartOffset(), documentText);
        }
        catch (FindManager.MalformedReplacementStringException e) {
          return null;
        }
      }
    }
    return stringToReplace;
  }

  /**
   * Replaces {@code occurrence} with {@code replacement} and refreshes the results.
   *
   * @return the range of the inserted text, or {@code null} if replacing is denied
   *         or the document is read-only
   */
  @Nullable
  public TextRange performReplace(final FindResult occurrence, final String replacement, final Editor editor) {
    if (myReplaceDenied || !ReadonlyStatusHandler.ensureDocumentWritable(editor.getProject(), editor.getDocument())) return null;
    FindModel findModel = mySearchResults.getFindModel();
    TextRange result = FindUtil.doReplace(editor.getProject(),
                                          editor.getDocument(),
                                          findModel,
                                          new FindResultImpl(occurrence.getStartOffset(), occurrence.getEndOffset()),
                                          replacement,
                                          true,
                                          new ArrayList<Pair<TextRange, String>>());
    // FIX: guard against a missing live preview (same NPE hazard as smartUpdate()).
    if (myLivePreview != null) {
      myLivePreview.inSmartUpdate();
    }
    mySearchResults.updateThreadSafe(findModel, true, result, mySearchResults.getStamp());
    return result;
  }

  /** Replaces all occurrences in {@code e}, honoring a non-global selection scope. */
  public void performReplaceAll(Editor e) {
    if (!ReadonlyStatusHandler.ensureDocumentWritable(e.getProject(), e.getDocument())) return;
    if (mySearchResults.getFindModel() != null) {
      final FindModel copy = new FindModel();
      copy.copyFrom(mySearchResults.getFindModel());
      final SelectionModel selectionModel = mySearchResults.getEditor().getSelectionModel();
      final int offset;
      if (!selectionModel.hasSelection() || copy.isGlobal()) {
        copy.setGlobal(true);
        offset = 0;
      }
      else {
        // Start from the beginning of the (block) selection when scoped.
        offset = selectionModel.getBlockSelectionStarts()[0];
      }
      FindUtil.replace(e.getProject(), e, offset, copy, this);
    }
  }

  /** An occurrence is skipped if the user explicitly excluded its range. */
  @Override
  public boolean shouldReplace(TextRange range, String replace) {
    for (RangeMarker r : mySearchResults.getExcluded()) {
      if (TextRange.areSegmentsEqual(r, range)) {
        return false;
      }
    }
    return true;
  }

  /** Whether the occurrence under the cursor can currently be replaced. */
  public boolean canReplace() {
    if (mySearchResults != null && mySearchResults.getCursor() != null && !isReplaceDenied()) {
      final String replacement = getStringToReplace(getEditor(), mySearchResults.getCursor());
      return replacement != null;
    }
    return false;
  }

  private Editor getEditor() {
    return mySearchResults.getEditor();
  }

  /** Replaces the occurrence under the cursor and records the replace text in history. */
  public void performReplace() {
    mySuppressUpdate = true;
    String replacement = getStringToReplace(getEditor(), mySearchResults.getCursor());
    if (replacement == null) {
      // FIX: reset the flag on this early return; previously it stayed true and
      // silently swallowed the next unrelated document change.
      mySuppressUpdate = false;
      return;
    }
    final TextRange textRange = performReplace(mySearchResults.getCursor(), replacement, getEditor());
    if (textRange == null) {
      mySuppressUpdate = false;
    }
    if (myComponent != null) {
      myComponent.addTextToRecent(myComponent.getReplaceField());
      myComponent.clearUndoInTextFields();
    }
  }

  /** Excludes the occurrence under the cursor from "replace all". */
  public void exclude() {
    mySearchResults.exclude(mySearchResults.getCursor());
  }

  public void performReplaceAll() {
    performReplaceAll(getEditor());
  }

  public void setTrackingDocument(boolean trackingDocument) {
    myTrackingDocument = trackingDocument;
  }

  /** Swaps in a new live preview, disposing and detaching the previous one. */
  public void setLivePreview(LivePreview livePreview) {
    if (myLivePreview != null) {
      myLivePreview.dispose();
      myLivePreview.setDelegate(null);
    }
    myLivePreview = livePreview;
    if (myLivePreview != null) {
      myLivePreview.setDelegate(this);
    }
  }

  /** Permanently tears the controller down; safe to call more than once. */
  public void dispose() {
    if (myDisposed) return;
    // FIX: myLivePreview is null when the preview was never turned on;
    // previously this line threw an NPE.
    if (myLivePreview != null) {
      myLivePreview.cleanUp();
    }
    off();
    mySearchResults.dispose();
    getEditor().getDocument().removeDocumentListener(myDocumentListener);
    myDisposed = true;
  }

  /** Turns the live preview on and starts tracking document changes. */
  public void on() {
    if (myDisposed) return;
    mySearchResults.setMatchesLimit(MATCHES_LIMIT);
    setTrackingDocument(true);
    if (myChanged) {
      // The document changed while we weren't listening; drop stale results.
      mySearchResults.clear();
      myChanged = false;
    }
    setLivePreview(new LivePreview(mySearchResults));
  }

  /** Turns the live preview off and stops tracking document and selection. */
  public void off() {
    if (myDisposed) return;
    setTrackingDocument(false);
    setLivePreview(null);
    setTrackingSelection(false);
  }
}
|
|
/*
* Copyright 2013-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.aws.core.io.s3;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.GetObjectMetadataRequest;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatcher;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* @author Alain Sahli
* @author Agim Emruli
* @author Greg Turnquist
* @since 1.0
*/
class PathMatchingSimpleStorageResourcePatternResolverTest {

  // Bucket-name wildcards are resolved by enumerating listBuckets() and
  // pattern-matching the names; see prepareMockForTestWildcardInBucketName().
  @Test
  void testWildcardInBucketName() throws Exception {
    AmazonS3 amazonS3 = prepareMockForTestWildcardInBucketName();
    ResourcePatternResolver resourceLoader = getResourceLoader(amazonS3);
    assertThat(resourceLoader.getResources("s3://myBucket*/test.txt").length).as("test the single '*' wildcard")
        .isEqualTo(2);
    assertThat(resourceLoader.getResources("s3://myBucket?wo/test.txt").length).as("test the '?' wildcard")
        .isEqualTo(1);
    assertThat(resourceLoader.getResources("s3://**/test.txt").length).as("test the double '**' wildcard")
        .isEqualTo(2);
  }

  // Key wildcards walk the virtual folder tree stubbed in
  // prepareMockForTestWildcardInKey() using delimiter-based listings.
  @Test
  void testWildcardInKey() throws IOException {
    AmazonS3 amazonS3 = prepareMockForTestWildcardInKey();
    ResourcePatternResolver resourceLoader = getResourceLoader(amazonS3);
    assertThat(resourceLoader.getResources("s3://myBucket/foo*/bar*/test.txt").length)
        .as("test the single '*' wildcard").isEqualTo(2);
    assertThat(resourceLoader.getResources("s3://myBucket/").length).as("test the bucket name only").isEqualTo(1);
    assertThat(resourceLoader.getResources("s3://myBucke?/fooOne/ba?One/test.txt").length)
        .as("test the '?' wildcard").isEqualTo(2);
    assertThat(resourceLoader.getResources("s3://myBucket/**/test.txt").length).as("test the double '**' wildcard")
        .isEqualTo(5);
    assertThat(resourceLoader.getResources("s3://myBucke?/**/*.txt").length).as("test all together").isEqualTo(5);
  }

  // Non-s3 patterns must fall through to the delegate classpath resolver.
  @Test
  void testLoadingClasspathFile() throws Exception {
    AmazonS3 amazonS3 = mock(AmazonS3.class);
    ResourcePatternResolver resourceLoader = getResourceLoader(amazonS3);
    Resource[] resources = resourceLoader.getResources(
        "classpath*:org/springframework/cloud/aws/core/io/s3/PathMatchingSimpleStorageResourcePatternResolverTest.class");
    assertThat(resources.length).isEqualTo(1);
    assertThat(resources[0].exists()).as("load without wildcards").isTrue();
    Resource[] resourcesWithFileNameWildcard = resourceLoader
        .getResources("classpath*:org/**/PathMatchingSimpleStorageResourcePatternResolverTes?.class");
    assertThat(resourcesWithFileNameWildcard.length).isEqualTo(1);
    assertThat(resourcesWithFileNameWildcard[0].exists()).as("load with wildcards").isTrue();
  }

  // Truncated listings must be followed up via listNextBatchOfObjects so that
  // no objects or common prefixes are lost.
  @Test
  void testTruncatedListings() throws Exception {
    AmazonS3 amazonS3 = prepareMockForTestTruncatedListings();
    ResourcePatternResolver resourceLoader = getResourceLoader(amazonS3);
    assertThat(resourceLoader.getResources("s3://myBucket/**/test.txt").length)
        .as("Test that all parts are returned when object summaries are truncated").isEqualTo(5);
    assertThat(resourceLoader.getResources("s3://myBucket/fooOne/ba*/test.txt").length)
        .as("Test that all parts are return when common prefixes are truncated").isEqualTo(1);
    assertThat(resourceLoader.getResources("s3://myBucket/").length)
        .as("Test that all parts are returned when only bucket name is used").isEqualTo(1);
  }

  // Stubs two-page listings (isTruncated=true on page 1) for both the
  // no-delimiter and the delimiter="/" request shapes.
  private AmazonS3 prepareMockForTestTruncatedListings() {
    AmazonS3 amazonS3 = mock(AmazonS3.class);
    // Without prefix calls
    ObjectListing objectListingWithoutPrefixPart1 = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooOne/barOne/test.txt"),
            createS3ObjectSummaryWithKey("fooOne/bazOne/test.txt"),
            createS3ObjectSummaryWithKey("fooTwo/barTwo/test.txt")),
        Collections.emptyList(), true);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", null, null))))
        .thenReturn(objectListingWithoutPrefixPart1);
    ObjectListing objectListingWithoutPrefixPart2 = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooThree/baz/test.txt"),
            createS3ObjectSummaryWithKey("foFour/barFour/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listNextBatchOfObjects(objectListingWithoutPrefixPart1))
        .thenReturn(objectListingWithoutPrefixPart2);
    // With prefix calls
    ObjectListing objectListingWithPrefixPart1 = createObjectListingMock(Collections.emptyList(),
        Arrays.asList("dooOne/", "dooTwo/"), true);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", null, "/"))))
        .thenReturn(objectListingWithPrefixPart1);
    ObjectListing objectListingWithPrefixPart2 = createObjectListingMock(Collections.emptyList(),
        Collections.singletonList("fooOne/"), false);
    when(amazonS3.listNextBatchOfObjects(objectListingWithPrefixPart1)).thenReturn(objectListingWithPrefixPart2);
    ObjectListing objectListingWithPrefixFooOne = createObjectListingMock(Collections.emptyList(),
        Collections.singletonList("fooOne/barOne/"), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooOne/", "/"))))
        .thenReturn(objectListingWithPrefixFooOne);
    ObjectListing objectListingWithPrefixFooOneBarOne = createObjectListingMock(
        Collections.singletonList(createS3ObjectSummaryWithKey("fooOne/barOne/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooOne/barOne/", "/"))))
        .thenReturn(objectListingWithPrefixFooOneBarOne);
    when(amazonS3.getObjectMetadata(any(GetObjectMetadataRequest.class))).thenReturn(new ObjectMetadata());
    return amazonS3;
  }

  // Four buckets: two match "myBucket*" and contain test.txt (matched by '**'),
  // one matches "myBucket?wo", one ("myBuckez") matches nothing with content.
  private AmazonS3 prepareMockForTestWildcardInBucketName() {
    AmazonS3 amazonS3 = mock(AmazonS3.class);
    when(amazonS3.listBuckets()).thenReturn(Arrays.asList(new Bucket("myBucketOne"), new Bucket("myBucketTwo"),
        new Bucket("anotherBucket"), new Bucket("myBuckez")));
    // Mocks for the '**' case
    ObjectListing objectListingWithOneFile = createObjectListingMock(
        Collections.singletonList(createS3ObjectSummaryWithKey("test.txt")), Collections.emptyList(), false);
    ObjectListing emptyObjectListing = createObjectListingMock(Collections.emptyList(), Collections.emptyList(),
        false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucketOne", null, null))))
        .thenReturn(objectListingWithOneFile);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucketTwo", null, null))))
        .thenReturn(emptyObjectListing);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("anotherBucket", null, null))))
        .thenReturn(objectListingWithOneFile);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBuckez", null, null))))
        .thenReturn(emptyObjectListing);
    when(amazonS3.getObjectMetadata(any(GetObjectMetadataRequest.class))).thenReturn(new ObjectMetadata());
    return amazonS3;
  }

  /**
   * Virtual test folder structure: fooOne/barOne/test.txt fooOne/bazOne/test.txt
   * fooTwo/barTwo/test.txt fooThree/baz/test.txt foFour/barFour/test.txt .
   */
  private AmazonS3 prepareMockForTestWildcardInKey() {
    AmazonS3 amazonS3 = mock(AmazonS3.class);
    // List buckets mock
    when(amazonS3.listBuckets()).thenReturn(Arrays.asList(new Bucket("myBucket"), new Bucket("myBuckets")));
    // Root requests
    ObjectListing objectListingMockAtRoot = createObjectListingMock(Collections.emptyList(),
        Arrays.asList("foFour/", "fooOne/", "fooThree/", "fooTwo/"), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", null, "/"))))
        .thenReturn(objectListingMockAtRoot);
    // Requests on fooOne
    ObjectListing objectListingFooOne = createObjectListingMock(
        Collections.singletonList(createS3ObjectSummaryWithKey("fooOne/")),
        Arrays.asList("fooOne/barOne/", "fooOne/bazOne/"), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooOne/", "/"))))
        .thenReturn(objectListingFooOne);
    ObjectListing objectListingFooOneBarOne = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooOne/barOne/"),
            createS3ObjectSummaryWithKey("fooOne/barOne/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooOne/barOne/", "/"))))
        .thenReturn(objectListingFooOneBarOne);
    ObjectListing objectListingFooOneBazOne = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooOne/bazOne/"),
            createS3ObjectSummaryWithKey("fooOne/bazOne/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooOne/bazOne/", "/"))))
        .thenReturn(objectListingFooOneBazOne);
    // Requests on fooTwo
    ObjectListing objectListingFooTwo = createObjectListingMock(
        Collections.singletonList(createS3ObjectSummaryWithKey("fooTwo/")),
        Collections.singletonList("fooTwo/barTwo/"), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooTwo/", "/"))))
        .thenReturn(objectListingFooTwo);
    ObjectListing objectListingFooTwoBarTwo = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooTwo/barTwo/"),
            createS3ObjectSummaryWithKey("fooTwo/barTwo/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooTwo/barTwo/", "/"))))
        .thenReturn(objectListingFooTwoBarTwo);
    // Requests on fooThree
    ObjectListing objectListingFooThree = createObjectListingMock(
        Collections.singletonList(createS3ObjectSummaryWithKey("fooThree/")),
        // NOTE(review): "fooTwo/baz/" under the fooThree/ listing looks like a
        // copy-paste slip ("fooThree/baz/"?). The suite presumably still passes
        // because the wildcard patterns never descend into this prefix — verify
        // against the resolver before changing.
        Collections.singletonList("fooTwo/baz/"), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooThree/", "/"))))
        .thenReturn(objectListingFooThree);
    ObjectListing objectListingFooThreeBaz = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooThree/baz/"),
            createS3ObjectSummaryWithKey("fooThree/baz/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "fooThree/baz/", "/"))))
        .thenReturn(objectListingFooThreeBaz);
    // Requests for foFour
    ObjectListing objectListingFoFour = createObjectListingMock(
        Collections.singletonList(createS3ObjectSummaryWithKey("foFour/")),
        Collections.singletonList("foFour/barFour/"), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "foFour/", "/"))))
        .thenReturn(objectListingFoFour);
    ObjectListing objectListingFoFourBarFour = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("foFour/barFour/"),
            createS3ObjectSummaryWithKey("foFour/barFour/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", "foFour/barFour/", "/"))))
        .thenReturn(objectListingFoFourBarFour);
    // Requests for all
    ObjectListing fullObjectListing = createObjectListingMock(
        Arrays.asList(createS3ObjectSummaryWithKey("fooOne/barOne/test.txt"),
            createS3ObjectSummaryWithKey("fooOne/bazOne/test.txt"),
            createS3ObjectSummaryWithKey("fooTwo/barTwo/test.txt"),
            createS3ObjectSummaryWithKey("fooThree/baz/test.txt"),
            createS3ObjectSummaryWithKey("foFour/barFour/test.txt")),
        Collections.emptyList(), false);
    when(amazonS3.listObjects(argThat(new ListObjectsRequestMatcher("myBucket", null, null))))
        .thenReturn(fullObjectListing);
    when(amazonS3.getObjectMetadata(any(GetObjectMetadataRequest.class))).thenReturn(new ObjectMetadata());
    return amazonS3;
  }

  // Builds a Mockito-backed ObjectListing with fixed summaries/prefixes and
  // the given truncation flag.
  private ObjectListing createObjectListingMock(List<S3ObjectSummary> objectSummaries, List<String> commonPrefixes,
      boolean truncated) {
    ObjectListing objectListing = mock(ObjectListing.class);
    when(objectListing.getObjectSummaries()).thenReturn(objectSummaries);
    when(objectListing.getCommonPrefixes()).thenReturn(commonPrefixes);
    when(objectListing.isTruncated()).thenReturn(truncated);
    return objectListing;
  }

  private S3ObjectSummary createS3ObjectSummaryWithKey(String key) {
    S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
    s3ObjectSummary.setKey(key);
    return s3ObjectSummary;
  }

  // Wraps the mocked client in the s3:// protocol resolver plus the standard
  // classpath resolver, matching the production wiring.
  private ResourcePatternResolver getResourceLoader(AmazonS3 amazonS3) {
    DefaultResourceLoader loader = new DefaultResourceLoader();
    loader.addProtocolResolver(new SimpleStorageProtocolResolver(amazonS3));
    return new PathMatchingSimpleStorageResourcePatternResolver(amazonS3,
        new PathMatchingResourcePatternResolver(loader));
  }

  // Matches a ListObjectsRequest by exact (null-aware) equality of bucket name,
  // prefix, and delimiter.
  private static final class ListObjectsRequestMatcher implements ArgumentMatcher<ListObjectsRequest> {

    private final String bucketName;

    private final String prefix;

    private final String delimiter;

    private ListObjectsRequestMatcher(String bucketName, String prefix, String delimiter) {
      this.bucketName = bucketName;
      this.prefix = prefix;
      this.delimiter = delimiter;
    }

    @Override
    public boolean matches(ListObjectsRequest listObjectsRequest) {
      if (listObjectsRequest == null) {
        return false;
      }
      boolean bucketNameIsEqual;
      if (listObjectsRequest.getBucketName() != null) {
        bucketNameIsEqual = listObjectsRequest.getBucketName().equals(this.bucketName);
      }
      else {
        bucketNameIsEqual = this.bucketName == null;
      }
      boolean prefixIsEqual;
      if (listObjectsRequest.getPrefix() != null) {
        prefixIsEqual = listObjectsRequest.getPrefix().equals(this.prefix);
      }
      else {
        prefixIsEqual = this.prefix == null;
      }
      boolean delimiterIsEqual;
      if (listObjectsRequest.getDelimiter() != null) {
        delimiterIsEqual = listObjectsRequest.getDelimiter().equals(this.delimiter);
      }
      else {
        delimiterIsEqual = this.delimiter == null;
      }
      return delimiterIsEqual && prefixIsEqual && bucketNameIsEqual;
    }

  }

}
|
|
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* GrpSettings.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202202;
/**
* {@code GrpSettings} contains information for a line item that will
* have a target demographic when
* serving. This information will be used to set up tracking
* and enable reporting on the demographic
* information.
*/
public class GrpSettings implements java.io.Serializable {
    /* Specifies the minimum target age (in years) of the {@link LineItem}.
     * This field is only
     * applicable if {@link #provider} is not null. */
    private java.lang.Long minTargetAge;
    /* Specifies the maximum target age (in years) of the {@link LineItem}.
     * This field is only
     * applicable if {@link #provider} is not null. */
    private java.lang.Long maxTargetAge;
    /* Specifies the target gender of the {@link LineItem}. This field
     * is only applicable if
     * {@link #provider} is not null. */
    private com.google.api.ads.admanager.axis.v202202.GrpTargetGender targetGender;
    /* Specifies the GRP provider of the {@link LineItem}. */
    private com.google.api.ads.admanager.axis.v202202.GrpProvider provider;
    /* Specifies the impression goal for the given target demographic.
     * This field is only applicable
     * if {@link #provider} is not null and demographics-based
     * goal is selected by the user. If this
     * field is set, {@link LineItem#primaryGoal} will have
     * its {@link Goal#units} value set by Google
     * to represent the estimated total quantity. */
    private java.lang.Long targetImpressionGoal;
    /* Estimate for the in-target ratio given the line item's audience
     * targeting. This field is only
     * applicable if {@link #provider} is Nielsen, {@link
     * LineItem#primaryGoal#unitType} is in-target
     * impressions, and {@link LineItem#CostType} is in-target
     * CPM. This field determines the
     * in-target ratio to use for pacing Nielsen line items
     * before Nielsen reporting data is
     * available. Represented as a milli percent, so 55.7%
     * becomes 55700. */
    private java.lang.Long inTargetRatioEstimateMilliPercent;
    /* Specifies which pacing computation to apply in pacing to impressions
     * from connected devices.
     * This field is required if {@code enableNielsenCoViewingSupport}
     * is true. */
    private com.google.api.ads.admanager.axis.v202202.NielsenCtvPacingType nielsenCtvPacingType;
    /* Specifies whether to use Google or Nielsen device breakdown
     * in Nielsen Line Item auto pacing. */
    private com.google.api.ads.admanager.axis.v202202.PacingDeviceCategorizationType pacingDeviceCategorizationType;
    private java.lang.Boolean applyTrueCoview;

    /** No-arg constructor required by the Axis bean (de)serializers. */
    public GrpSettings() {
    }

    /** All-fields constructor used by generated client code. */
    public GrpSettings(
           java.lang.Long minTargetAge,
           java.lang.Long maxTargetAge,
           com.google.api.ads.admanager.axis.v202202.GrpTargetGender targetGender,
           com.google.api.ads.admanager.axis.v202202.GrpProvider provider,
           java.lang.Long targetImpressionGoal,
           java.lang.Long inTargetRatioEstimateMilliPercent,
           com.google.api.ads.admanager.axis.v202202.NielsenCtvPacingType nielsenCtvPacingType,
           com.google.api.ads.admanager.axis.v202202.PacingDeviceCategorizationType pacingDeviceCategorizationType,
           java.lang.Boolean applyTrueCoview) {
           this.minTargetAge = minTargetAge;
           this.maxTargetAge = maxTargetAge;
           this.targetGender = targetGender;
           this.provider = provider;
           this.targetImpressionGoal = targetImpressionGoal;
           this.inTargetRatioEstimateMilliPercent = inTargetRatioEstimateMilliPercent;
           this.nielsenCtvPacingType = nielsenCtvPacingType;
           this.pacingDeviceCategorizationType = pacingDeviceCategorizationType;
           this.applyTrueCoview = applyTrueCoview;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
                .omitNullValues()
                .add("applyTrueCoview", getApplyTrueCoview())
                .add("inTargetRatioEstimateMilliPercent", getInTargetRatioEstimateMilliPercent())
                .add("maxTargetAge", getMaxTargetAge())
                .add("minTargetAge", getMinTargetAge())
                .add("nielsenCtvPacingType", getNielsenCtvPacingType())
                .add("pacingDeviceCategorizationType", getPacingDeviceCategorizationType())
                .add("provider", getProvider())
                .add("targetGender", getTargetGender())
                .add("targetImpressionGoal", getTargetImpressionGoal())
                .toString();
    }

    /**
     * Gets the minTargetAge value for this GrpSettings.
     *
     * @return minTargetAge   * Specifies the minimum target age (in years) of the {@link LineItem}.
     * This field is only
     *                 applicable if {@link #provider} is not null.
     */
    public java.lang.Long getMinTargetAge() {
        return minTargetAge;
    }

    /**
     * Sets the minTargetAge value for this GrpSettings.
     *
     * @param minTargetAge   * Specifies the minimum target age (in years) of the {@link LineItem}.
     * This field is only
     *                 applicable if {@link #provider} is not null.
     */
    public void setMinTargetAge(java.lang.Long minTargetAge) {
        this.minTargetAge = minTargetAge;
    }

    /**
     * Gets the maxTargetAge value for this GrpSettings.
     *
     * @return maxTargetAge   * Specifies the maximum target age (in years) of the {@link LineItem}.
     * This field is only
     *                 applicable if {@link #provider} is not null.
     */
    public java.lang.Long getMaxTargetAge() {
        return maxTargetAge;
    }

    /**
     * Sets the maxTargetAge value for this GrpSettings.
     *
     * @param maxTargetAge   * Specifies the maximum target age (in years) of the {@link LineItem}.
     * This field is only
     *                 applicable if {@link #provider} is not null.
     */
    public void setMaxTargetAge(java.lang.Long maxTargetAge) {
        this.maxTargetAge = maxTargetAge;
    }

    /**
     * Gets the targetGender value for this GrpSettings.
     *
     * @return targetGender   * Specifies the target gender of the {@link LineItem}. This field
     * is only applicable if
     *                 {@link #provider} is not null.
     */
    public com.google.api.ads.admanager.axis.v202202.GrpTargetGender getTargetGender() {
        return targetGender;
    }

    /**
     * Sets the targetGender value for this GrpSettings.
     *
     * @param targetGender   * Specifies the target gender of the {@link LineItem}. This field
     * is only applicable if
     *                 {@link #provider} is not null.
     */
    public void setTargetGender(com.google.api.ads.admanager.axis.v202202.GrpTargetGender targetGender) {
        this.targetGender = targetGender;
    }

    /**
     * Gets the provider value for this GrpSettings.
     *
     * @return provider   * Specifies the GRP provider of the {@link LineItem}.
     */
    public com.google.api.ads.admanager.axis.v202202.GrpProvider getProvider() {
        return provider;
    }

    /**
     * Sets the provider value for this GrpSettings.
     *
     * @param provider   * Specifies the GRP provider of the {@link LineItem}.
     */
    public void setProvider(com.google.api.ads.admanager.axis.v202202.GrpProvider provider) {
        this.provider = provider;
    }

    /**
     * Gets the targetImpressionGoal value for this GrpSettings.
     *
     * @return targetImpressionGoal   * Specifies the impression goal for the given target demographic.
     * This field is only applicable
     *                 if {@link #provider} is not null and demographics-based
     * goal is selected by the user. If this
     *                 field is set, {@link LineItem#primaryGoal} will have
     * its {@link Goal#units} value set by Google
     *                 to represent the estimated total quantity.
     */
    public java.lang.Long getTargetImpressionGoal() {
        return targetImpressionGoal;
    }

    /**
     * Sets the targetImpressionGoal value for this GrpSettings.
     *
     * @param targetImpressionGoal   * Specifies the impression goal for the given target demographic.
     * This field is only applicable
     *                 if {@link #provider} is not null and demographics-based
     * goal is selected by the user. If this
     *                 field is set, {@link LineItem#primaryGoal} will have
     * its {@link Goal#units} value set by Google
     *                 to represent the estimated total quantity.
     */
    public void setTargetImpressionGoal(java.lang.Long targetImpressionGoal) {
        this.targetImpressionGoal = targetImpressionGoal;
    }

    /**
     * Gets the inTargetRatioEstimateMilliPercent value for this GrpSettings.
     *
     * @return inTargetRatioEstimateMilliPercent   * Estimate for the in-target ratio given the line item's audience
     * targeting. This field is only
     *                 applicable if {@link #provider} is Nielsen, {@link
     * LineItem#primaryGoal#unitType} is in-target
     *                 impressions, and {@link LineItem#CostType} is in-target
     * CPM. This field determines the
     *                 in-target ratio to use for pacing Nielsen line items
     * before Nielsen reporting data is
     *                 available. Represented as a milli percent, so 55.7%
     * becomes 55700.
     */
    public java.lang.Long getInTargetRatioEstimateMilliPercent() {
        return inTargetRatioEstimateMilliPercent;
    }

    /**
     * Sets the inTargetRatioEstimateMilliPercent value for this GrpSettings.
     *
     * @param inTargetRatioEstimateMilliPercent   * Estimate for the in-target ratio given the line item's audience
     * targeting. This field is only
     *                 applicable if {@link #provider} is Nielsen, {@link
     * LineItem#primaryGoal#unitType} is in-target
     *                 impressions, and {@link LineItem#CostType} is in-target
     * CPM. This field determines the
     *                 in-target ratio to use for pacing Nielsen line items
     * before Nielsen reporting data is
     *                 available. Represented as a milli percent, so 55.7%
     * becomes 55700.
     */
    public void setInTargetRatioEstimateMilliPercent(java.lang.Long inTargetRatioEstimateMilliPercent) {
        this.inTargetRatioEstimateMilliPercent = inTargetRatioEstimateMilliPercent;
    }

    /**
     * Gets the nielsenCtvPacingType value for this GrpSettings.
     *
     * @return nielsenCtvPacingType   * Specifies which pacing computation to apply in pacing to impressions
     * from connected devices.
     *                 This field is required if {@code enableNielsenCoViewingSupport}
     * is true.
     */
    public com.google.api.ads.admanager.axis.v202202.NielsenCtvPacingType getNielsenCtvPacingType() {
        return nielsenCtvPacingType;
    }

    /**
     * Sets the nielsenCtvPacingType value for this GrpSettings.
     *
     * @param nielsenCtvPacingType   * Specifies which pacing computation to apply in pacing to impressions
     * from connected devices.
     *                 This field is required if {@code enableNielsenCoViewingSupport}
     * is true.
     */
    public void setNielsenCtvPacingType(com.google.api.ads.admanager.axis.v202202.NielsenCtvPacingType nielsenCtvPacingType) {
        this.nielsenCtvPacingType = nielsenCtvPacingType;
    }

    /**
     * Gets the pacingDeviceCategorizationType value for this GrpSettings.
     *
     * @return pacingDeviceCategorizationType   * Specifies whether to use Google or Nielsen device breakdown
     * in Nielsen Line Item auto pacing.
     */
    public com.google.api.ads.admanager.axis.v202202.PacingDeviceCategorizationType getPacingDeviceCategorizationType() {
        return pacingDeviceCategorizationType;
    }

    /**
     * Sets the pacingDeviceCategorizationType value for this GrpSettings.
     *
     * @param pacingDeviceCategorizationType   * Specifies whether to use Google or Nielsen device breakdown
     * in Nielsen Line Item auto pacing.
     */
    public void setPacingDeviceCategorizationType(com.google.api.ads.admanager.axis.v202202.PacingDeviceCategorizationType pacingDeviceCategorizationType) {
        this.pacingDeviceCategorizationType = pacingDeviceCategorizationType;
    }

    /**
     * Gets the applyTrueCoview value for this GrpSettings.
     *
     * @return applyTrueCoview
     */
    public java.lang.Boolean getApplyTrueCoview() {
        return applyTrueCoview;
    }

    /**
     * Sets the applyTrueCoview value for this GrpSettings.
     *
     * @param applyTrueCoview
     */
    public void setApplyTrueCoview(java.lang.Boolean applyTrueCoview) {
        this.applyTrueCoview = applyTrueCoview;
    }

    // Cycle guard used by the Axis-generated equals(): remembers the object
    // currently being compared so self-referential object graphs terminate.
    private java.lang.Object __equalsCalc = null;

    @Override
    public synchronized boolean equals(java.lang.Object obj) {
        // Canonical ordering: identity first, then instanceof (which also
        // rejects null), then the cast. The generated code cast before the
        // null check, leaving a dead null test after the cast.
        if (this == obj) return true;
        if (!(obj instanceof GrpSettings)) return false;
        GrpSettings other = (GrpSettings) obj;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.minTargetAge==null && other.getMinTargetAge()==null) ||
             (this.minTargetAge!=null &&
              this.minTargetAge.equals(other.getMinTargetAge()))) &&
            ((this.maxTargetAge==null && other.getMaxTargetAge()==null) ||
             (this.maxTargetAge!=null &&
              this.maxTargetAge.equals(other.getMaxTargetAge()))) &&
            ((this.targetGender==null && other.getTargetGender()==null) ||
             (this.targetGender!=null &&
              this.targetGender.equals(other.getTargetGender()))) &&
            ((this.provider==null && other.getProvider()==null) ||
             (this.provider!=null &&
              this.provider.equals(other.getProvider()))) &&
            ((this.targetImpressionGoal==null && other.getTargetImpressionGoal()==null) ||
             (this.targetImpressionGoal!=null &&
              this.targetImpressionGoal.equals(other.getTargetImpressionGoal()))) &&
            ((this.inTargetRatioEstimateMilliPercent==null && other.getInTargetRatioEstimateMilliPercent()==null) ||
             (this.inTargetRatioEstimateMilliPercent!=null &&
              this.inTargetRatioEstimateMilliPercent.equals(other.getInTargetRatioEstimateMilliPercent()))) &&
            ((this.nielsenCtvPacingType==null && other.getNielsenCtvPacingType()==null) ||
             (this.nielsenCtvPacingType!=null &&
              this.nielsenCtvPacingType.equals(other.getNielsenCtvPacingType()))) &&
            ((this.pacingDeviceCategorizationType==null && other.getPacingDeviceCategorizationType()==null) ||
             (this.pacingDeviceCategorizationType!=null &&
              this.pacingDeviceCategorizationType.equals(other.getPacingDeviceCategorizationType()))) &&
            ((this.applyTrueCoview==null && other.getApplyTrueCoview()==null) ||
             (this.applyTrueCoview!=null &&
              this.applyTrueCoview.equals(other.getApplyTrueCoview())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard used by hashCode(): a cyclic object graph contributes 0
    // the second time a node is visited instead of recursing forever.
    private boolean __hashCodeCalc = false;

    @Override
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getMinTargetAge() != null) {
            _hashCode += getMinTargetAge().hashCode();
        }
        if (getMaxTargetAge() != null) {
            _hashCode += getMaxTargetAge().hashCode();
        }
        if (getTargetGender() != null) {
            _hashCode += getTargetGender().hashCode();
        }
        if (getProvider() != null) {
            _hashCode += getProvider().hashCode();
        }
        if (getTargetImpressionGoal() != null) {
            _hashCode += getTargetImpressionGoal().hashCode();
        }
        if (getInTargetRatioEstimateMilliPercent() != null) {
            _hashCode += getInTargetRatioEstimateMilliPercent().hashCode();
        }
        if (getNielsenCtvPacingType() != null) {
            _hashCode += getNielsenCtvPacingType().hashCode();
        }
        if (getPacingDeviceCategorizationType() != null) {
            _hashCode += getPacingDeviceCategorizationType().hashCode();
        }
        if (getApplyTrueCoview() != null) {
            _hashCode += getApplyTrueCoview().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata consumed by the Axis runtime to (de)serialize this bean.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(GrpSettings.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "GrpSettings"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("minTargetAge");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "minTargetAge"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("maxTargetAge");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "maxTargetAge"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("targetGender");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "targetGender"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "GrpTargetGender"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("provider");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "provider"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "GrpProvider"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("targetImpressionGoal");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "targetImpressionGoal"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("inTargetRatioEstimateMilliPercent");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "inTargetRatioEstimateMilliPercent"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("nielsenCtvPacingType");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "nielsenCtvPacingType"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "NielsenCtvPacingType"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("pacingDeviceCategorizationType");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "pacingDeviceCategorizationType"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "PacingDeviceCategorizationType"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("applyTrueCoview");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "applyTrueCoview"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "boolean"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
|
|
package com.bluecoreservices.monitorgasolina.automoviles;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import com.bluecoreservices.monitorgasolina.Main2Activity;
import com.bluecoreservices.monitorgasolina.R;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
public class automobileList extends Fragment {
public final static String PAGINA_DEBUG = "Lista autos Frag";
public RecyclerView mRecyclerView;
public RecyclerView.Adapter mAdapter;
public RecyclerView.LayoutManager mLayoutManager;
public automobileList() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
View rootView = inflater.inflate(R.layout.fragment_automobile_list, container, false);
mRecyclerView = (RecyclerView) rootView.findViewById(R.id.automobile_list);
mLayoutManager = new LinearLayoutManager(rootView.getContext());
mRecyclerView.setHasFixedSize(true);
mRecyclerView.setLayoutManager(mLayoutManager);
return rootView;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
LoadAutomobiles();
}
@Override
public void onDetach() {
super.onDetach();
}
private void LoadAutomobiles() {
class LoginAsync extends AsyncTask<String, Void, JSONObject> {
private Dialog loadingDialog;
private final String url = "http://mg.bluecoreservices.com/api/vehicle.php";
String charset = "UTF-8";
HttpURLConnection conn;
DataOutputStream wr;
StringBuilder result = new StringBuilder();
URL urlObj;
JSONObject jObj = null;
StringBuilder sbParams;
String paramsString;
@Override
protected void onPreExecute() {
super.onPreExecute();
loadingDialog = ProgressDialog.show(getContext(), "Please wait", "Loading...");
}
@Override
protected JSONObject doInBackground(String... params) {
String idUser = "1";
sbParams = new StringBuilder();
try {
sbParams.append("accion").append("=").append(URLEncoder.encode("list", charset)).append("&");
sbParams.append("userId").append("=").append(URLEncoder.encode(idUser, charset));
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
try {
urlObj = new URL(url);
conn = (HttpURLConnection) urlObj.openConnection();
conn.setDoOutput(true);
conn.setRequestMethod("POST");
conn.setRequestProperty("Accept-Charset", charset);
conn.setReadTimeout(10000);
conn.setConnectTimeout(15000);
conn.connect();
paramsString = sbParams.toString();
wr = new DataOutputStream(conn.getOutputStream());
wr.writeBytes(paramsString);
wr.flush();
wr.close();
} catch (IOException e) {
e.printStackTrace();
}
try {
//response from the server
InputStream in = new BufferedInputStream(conn.getInputStream());
BufferedReader reader = new BufferedReader(new InputStreamReader(in));
String line;
while ((line = reader.readLine()) != null) {
result.append(line);
}
} catch (IOException e) {
e.printStackTrace();
}
conn.disconnect();
String stringResult = result.toString().trim();
try {
jObj = new JSONObject(stringResult);
} catch (JSONException e) {
e.printStackTrace();
}
return jObj;
}
@Override
protected void onPostExecute(JSONObject result) {
loadingDialog.dismiss();
//Integer listSize = mAdapter.getCount() -1;
JSONArray listaVehiculos = null;
try {
listaVehiculos = result.getJSONArray("VehicleLists");
ArrayList<String> brands = new ArrayList<String>();
ArrayList<String> models = new ArrayList<String>();
ArrayList<String> years = new ArrayList<String>();
final ArrayList<String> ids = new ArrayList<String>();
if (listaVehiculos.length() > 0){
/*brands.clear();
mAdapter.notifyDataSetChanged();*/
}
for (int i = 0; i < listaVehiculos.length(); i++){
JSONObject categoriaVehiculo = listaVehiculos.getJSONObject(i);
brands.add(categoriaVehiculo.getString("brand"));
models.add(categoriaVehiculo.getString("model"));
years.add(categoriaVehiculo.getString("year"));
ids.add(categoriaVehiculo.getString("id"));
}
Log.i(PAGINA_DEBUG, brands.toString());
Log.i(PAGINA_DEBUG, models.toString());
Log.i(PAGINA_DEBUG, years.toString());
Log.i(PAGINA_DEBUG, ids.toString());
ArrayAdapter adapter = new ArrayAdapter(getContext(), R.layout.automobile_list_element);
}catch (JSONException e) {
e.printStackTrace();
}
/*
ArrayAdapter adapter = new ArrayAdapter(getContext(), android.R.layout.simple_spinner_item, nombres);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
if (listSize > 0){
nombres.clear();
adapter.notifyDataSetChanged();
}*/
/*try {
categoriaLista = result.getJSONArray("categorias");
for (int i = 0; i < categoriaLista.length(); i++){
JSONObject categoriaElemento = categoriaLista.getJSONObject(i);
nombres.add(categoriaElemento.getString("nombre"));
ids.add(categoriaElemento.getString("id"));
}
listaCatego.setAdapter(adapter);
adapter.notifyDataSetChanged();
listaCatego.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
selectedGasType = ids.get(position);
Log.i(PAGINA_DEBUG, selectedGasType);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
});
} catch (JSONException e) {
e.printStackTrace();
}*/
}
}
LoginAsync la = new LoginAsync();
la.execute();
}
}
|
|
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v17.leanback.supportleanbackshowcase.app.room.db.repo;
import android.arch.lifecycle.LiveData;
import android.arch.persistence.room.Room;
import android.os.AsyncTask;
import android.support.annotation.WorkerThread;
import android.support.v17.leanback.supportleanbackshowcase.R;
import android.support.v17.leanback.supportleanbackshowcase.app.room.controller.app.SampleApplication;
import android.support.v17.leanback.supportleanbackshowcase.app.room.api.VideoDownloadingService;
import android.support.v17.leanback.supportleanbackshowcase.app.room.api.VideosWithGoogleTag;
import android.support.v17.leanback.supportleanbackshowcase.app.room.config.AppConfiguration;
import android.support.v17.leanback.supportleanbackshowcase.app.room.db.AppDatabase;
import android.support.v17.leanback.supportleanbackshowcase.app.room.db.dao.CategoryDao;
import android.support.v17.leanback.supportleanbackshowcase.app.room.db.dao.VideoDao;
import android.support.v17.leanback.supportleanbackshowcase.app.room.db.entity.CategoryEntity;
import android.support.v17.leanback.supportleanbackshowcase.app.room.db.entity.VideoEntity;
import android.support.v17.leanback.supportleanbackshowcase.utils.Utils;
import android.util.Log;
import com.google.gson.Gson;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Singleton;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
@Singleton
public class VideosRepository {

    // For debugging purpose
    private static final boolean DEBUG = false;
    private static final String TAG = "VideosRepository";

    // meta data type constants used as the "category" argument of updateDatabase()
    private static final String RENTED = "rented";
    private static final String STATUS = "status";
    private static final String CARD = "card";
    private static final String BACKGROUND = "background";
    private static final String VIDEO = "video";

    private static VideosRepository sVideosRepository;

    private AppDatabase mDb;
    private VideoDao mVideoDao;
    private CategoryDao mCategoryDao;

    // maintain the local cache so the live data can be shared among different components
    private Map<String, LiveData<List<VideoEntity>>> mVideoEntitiesCache;
    private LiveData<List<CategoryEntity>> mCategories;

    /**
     * Lazily creates and returns the process-wide repository instance.
     *
     * <p>Synchronized because the original unsynchronized check-then-act could
     * create two instances (and open the database twice) when called from two
     * threads at once.
     */
    public static synchronized VideosRepository getVideosRepositoryInstance() {
        if (sVideosRepository == null) {
            sVideosRepository = new VideosRepository();
        }
        return sVideosRepository;
    }

    /**
     * View Model talks to repository through this method to fetch the live data.
     *
     * @param category category
     * @return The list of videos in that category, wrapped in a live data.
     */
    public LiveData<List<VideoEntity>> getVideosInSameCategoryLiveData(String category) {
        // always try to retrieve from the local cache first so all observers
        // of the same category share one LiveData instance
        if (mVideoEntitiesCache.containsKey(category)) {
            return mVideoEntitiesCache.get(category);
        }
        LiveData<List<VideoEntity>> videoEntities = mVideoDao.loadVideoInSameCateogry(category);
        mVideoEntitiesCache.put(category, videoEntities);
        return videoEntities;
    }

    /** Returns (and lazily caches) the live list of all categories. */
    public LiveData<List<CategoryEntity>> getAllCategories() {
        if (mCategories == null) {
            mCategories = mCategoryDao.loadAllCategories();
        }
        return mCategories;
    }

    /** Full-text search over videos, delegated to the DAO. */
    public LiveData<List<VideoEntity>> getSearchResult(String query) {
        return mVideoDao.searchVideos(query);
    }

    /** Looks up a single video by primary key. */
    public LiveData<VideoEntity> getVideoById(Long id) {
        return mVideoDao.loadVideoById(id);
    }

    /**
     * Helper function to access the database and update the video information in the database.
     *
     * @param video    video entity
     * @param category which field to update (one of VIDEO/BACKGROUND/CARD/STATUS/RENTED)
     * @param value    updated value (ignored for RENTED, which is forced to true)
     */
    @WorkerThread
    public synchronized void updateDatabase(VideoEntity video, String category, String value) {
        // beginTransaction() must complete before entering try/finally:
        // if it throws inside the try, endTransaction() would run without an
        // open transaction and crash (this was the original structure).
        mDb.beginTransaction();
        try {
            switch (category) {
                case VIDEO:
                    video.setVideoLocalStorageUrl(value);
                    break;
                case BACKGROUND:
                    video.setVideoBgImageLocalStorageUrl(value);
                    break;
                case CARD:
                    video.setVideoCardImageLocalStorageUrl(value);
                    break;
                case STATUS:
                    video.setStatus(value);
                    break;
                case RENTED:
                    video.setRented(true);
                    break;
            }
            mDb.videoDao().updateVideo(video);
            mDb.setTransactionSuccessful();
        } finally {
            mDb.endTransaction();
        }
    }

    @Inject
    public VideosRepository() {
        createAndPopulateDatabase();
        mVideoDao = mDb.videoDao();
        mCategoryDao = mDb.categoryDao();
        mVideoEntitiesCache = new HashMap<>();
    }

    /** Opens the Room database and kicks off the initial content load. */
    private void createAndPopulateDatabase() {
        mDb = Room.databaseBuilder(SampleApplication.getInstance(),
                AppDatabase.class, AppDatabase.DATABASE_NAME).build();

        // insert contents into database
        try {
            String url =
                    "https://storage.googleapis.com/android-tv/";
            initializeDb(mDb, url);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Loads content either from the bundled debug JSON or from the network,
     * depending on {@code AppConfiguration.IS_DEBUGGING_VERSION}.
     */
    private void initializeDb(AppDatabase db, String url) throws IOException {
        // json data
        String json;
        if (AppConfiguration.IS_DEBUGGING_VERSION) {
            // when use debugging version, we won't fetch data from network but using local
            // json file (only contain 4 video entities in 2 categories.)
            json = Utils.inputStreamToString(SampleApplication
                    .getInstance()
                    .getApplicationContext()
                    .getResources()
                    .openRawResource(R.raw.live_movie_debug));
            Gson gson = new Gson();
            VideosWithGoogleTag videosWithGoogleTag = gson.fromJson(json,
                    VideosWithGoogleTag.class);
            populateDatabase(videosWithGoogleTag, db);
        } else {
            buildDatabase(db, url);
        }
    }

    /**
     * Downloads the video catalog from the network and populates the database.
     *
     * @param db  Room database.
     * @param url base URL of the catalog service.
     */
    private static void buildDatabase(final AppDatabase db, String url) throws IOException {
        Retrofit retrofit = new Retrofit
                .Builder()
                .baseUrl(url)
                .addConverterFactory(GsonConverterFactory.create())
                .build();
        VideoDownloadingService service = retrofit.create(VideoDownloadingService.class);
        Call<VideosWithGoogleTag> videosWithGoogleTagCall = service.getVideosList();
        videosWithGoogleTagCall.enqueue(new Callback<VideosWithGoogleTag>() {
            @Override
            public void onResponse(Call<VideosWithGoogleTag> call, Response<VideosWithGoogleTag> response) {
                VideosWithGoogleTag videosWithGoogleTag = response.body();
                if (videosWithGoogleTag == null) {
                    Log.d(TAG, "onResponse: result is null");
                    return;
                }
                populateDatabase(videosWithGoogleTag, db);
            }

            @Override
            public void onFailure(Call<VideosWithGoogleTag> call, Throwable t) {
                Log.d(TAG, "Fail to download the content");
            }
        });
    }

    /** Writes the downloaded catalog into the database, one category per transaction. */
    private static void populateDatabase(VideosWithGoogleTag videosWithGoogleTag, final AppDatabase db) {
        for (final VideosWithGoogleTag.VideosGroupByCategory videosGroupByCategory :
                videosWithGoogleTag.getAllResources()) {

            // create category table
            final CategoryEntity categoryEntity = new CategoryEntity();
            categoryEntity.setCategoryName(videosGroupByCategory.getCategory());

            // create video table with customization
            postProcessing(videosGroupByCategory);
            new AsyncTask<Void, Void, Void>() {
                @Override
                protected Void doInBackground(Void... voids) {
                    // Same fix as updateDatabase(): open the transaction before
                    // the try so endTransaction() only runs when it is active.
                    db.beginTransaction();
                    try {
                        db.categoryDao().insertCategory(categoryEntity);
                        db.videoDao().insertAllVideos(videosGroupByCategory.getVideos());
                        db.setTransactionSuccessful();
                    } finally {
                        db.endTransaction();
                    }
                    return null;
                }
            }.execute();
        }
    }

    /**
     * Helper function to make some customization on raw data
     */
    private static void postProcessing(VideosWithGoogleTag.VideosGroupByCategory videosGroupByCategory) {
        for (VideoEntity each : videosGroupByCategory.getVideos()) {
            each.setCategory(videosGroupByCategory.getCategory());
            each.setVideoLocalStorageUrl("");
            each.setVideoBgImageLocalStorageUrl("");
            each.setVideoCardImageLocalStorageUrl("");
            each.setVideoUrl(each.getVideoUrls().get(0));
            each.setRented(false);
            each.setStatus("");
            each.setTrailerVideoUrl("https://storage.googleapis.com/android-tv/Sample%20videos/Google%2B/Google%2B_%20Say%20more%20with%20Hangouts.mp4");
        }
    }
}
|
|
/*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*
*/
package org.finos.waltz.data.application;
import org.finos.waltz.schema.tables.*;
import org.finos.waltz.data.SelectorUtilities;
import org.finos.waltz.data.data_type.DataTypeIdSelectorFactory;
import org.finos.waltz.data.logical_flow.LogicalFlowDao;
import org.finos.waltz.data.measurable.MeasurableIdSelectorFactory;
import org.finos.waltz.data.orgunit.OrganisationalUnitIdSelectorFactory;
import org.finos.waltz.model.EntityKind;
import org.finos.waltz.model.EntityReference;
import org.finos.waltz.model.IdSelectionOptions;
import org.finos.waltz.model.ImmutableIdSelectionOptions;
import org.jooq.*;
import org.jooq.impl.DSL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.util.function.Function;
import static org.finos.waltz.schema.Tables.*;
import static org.finos.waltz.schema.tables.Application.APPLICATION;
import static org.finos.waltz.schema.tables.ApplicationGroupEntry.APPLICATION_GROUP_ENTRY;
import static org.finos.waltz.schema.tables.EntityRelationship.ENTITY_RELATIONSHIP;
import static org.finos.waltz.schema.tables.FlowDiagramEntity.FLOW_DIAGRAM_ENTITY;
import static org.finos.waltz.schema.tables.Involvement.INVOLVEMENT;
import static org.finos.waltz.schema.tables.LogicalFlow.LOGICAL_FLOW;
import static org.finos.waltz.schema.tables.LogicalFlowDecorator.LOGICAL_FLOW_DECORATOR;
import static org.finos.waltz.schema.tables.MeasurableRating.MEASURABLE_RATING;
import static org.finos.waltz.schema.tables.Person.PERSON;
import static org.finos.waltz.schema.tables.PersonHierarchy.PERSON_HIERARCHY;
import static org.finos.waltz.common.Checks.checkNotNull;
import static org.finos.waltz.common.Checks.checkTrue;
import static org.finos.waltz.model.EntityLifecycleStatus.REMOVED;
import static org.finos.waltz.model.HierarchyQueryScope.EXACT;
/**
 * Builds jOOQ sub-selects that resolve a set of APPLICATION ids from an
 * {@link IdSelectionOptions} whose entity reference may be of many different kinds
 * (org unit, person, measurable, flow, diagram, ...).
 *
 * <p>Each {@code mkForXxx} method returns a {@code Select<Record1<Long>>} of
 * application ids, typically filtered by
 * {@code SelectorUtilities.mkApplicationConditions(options)} so lifecycle/kind
 * filters in the options are respected.
 */
@Service
public class ApplicationIdSelectorFactory implements Function<IdSelectionOptions, Select<Record1<Long>>> {

    // NOTE(review): LOG appears unused within this class as shown here.
    private static final Logger LOG = LoggerFactory.getLogger(ApplicationIdSelectorFactory.class);

    // Delegate selector factories used to expand hierarchical references
    // (data types, measurables, org units) into id sub-selects.
    private static final DataTypeIdSelectorFactory dataTypeIdSelectorFactory = new DataTypeIdSelectorFactory();
    private static final MeasurableIdSelectorFactory measurableIdSelectorFactory = new MeasurableIdSelectorFactory();
    private static final OrganisationalUnitIdSelectorFactory orgUnitIdSelectorFactory = new OrganisationalUnitIdSelectorFactory();

    // Aliased table references reused across the query-builder methods below.
    private static final FlowDiagramEntity flowDiagram = FLOW_DIAGRAM_ENTITY.as("fd");
    private static final Involvement involvement = INVOLVEMENT.as("inv");
    private static final LogicalFlow logicalFlow = LOGICAL_FLOW.as("lf");
    private static final MeasurableRating measurableRating = MEASURABLE_RATING.as("mr");
    private static final Person person = PERSON.as("p");
    private static final PersonHierarchy personHierarchy = PERSON_HIERARCHY.as("ph");

    /**
     * Dispatches on the entity kind of the options' reference to the appropriate
     * selector builder.
     *
     * @param options selection options; must not be null
     * @return a select of application ids
     * @throws IllegalArgumentException if the entity kind is not supported
     */
    public Select<Record1<Long>> apply(IdSelectionOptions options) {
        checkNotNull(options, "options cannot be null");
        EntityReference ref = options.entityReference();
        switch (ref.kind()) {
            case ACTOR:
                return mkForActor(options);
            case APP_GROUP:
                return mkForAppGroup(options);
            case APPLICATION:
                return mkForApplication(options);
            case CHANGE_INITIATIVE:
                return mkForEntityRelationship(options);
            case DATA_TYPE:
                return mkForDataType(options);
            case FLOW_DIAGRAM:
                return mkForFlowDiagram(options);
            case LICENCE:
                return mkForLicence(options);
            case LOGICAL_DATA_FLOW:
                return mkForLogicalDataFlow(options);
            case MEASURABLE:
                return mkForMeasurable(options);
            case SCENARIO:
                return mkForScenario(options);
            case SERVER:
                return mkForServer(options);
            case ORG_UNIT:
                return mkForOrgUnit(options);
            case PERSON:
                return mkForPerson(options);
            case PHYSICAL_FLOW:
                return mkForPhysicalFlow(options);
            case PHYSICAL_SPECIFICATION:
                return mkForPhysicalSpec(options);
            case SOFTWARE:
                return mkForSoftwarePackage(options);
            case SOFTWARE_VERSION:
                return mkForSoftwareVersion(options);
            case TAG:
                return mkForTag(options);
            case DATABASE:
                return mkForDatabase(options);
            case PROCESS_DIAGRAM:
                return mkForProcessDiagram(options);
            default:
                throw new IllegalArgumentException("Cannot create selector for entity kind: " + ref.kind());
        }
    }

    /**
     * Applications that are the source or target of any logical flow carrying the
     * given physical specification (union of source-side and target-side apps).
     */
    private Select<Record1<Long>> mkForPhysicalSpec(IdSelectionOptions options) {
        return DSL
                .select(APPLICATION.ID)
                .from(PHYSICAL_FLOW)
                .innerJoin(LOGICAL_FLOW).on(PHYSICAL_FLOW.LOGICAL_FLOW_ID.eq(LOGICAL_FLOW.ID))
                .innerJoin(APPLICATION).on(LOGICAL_FLOW.SOURCE_ENTITY_ID.eq(APPLICATION.ID))
                .where(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                        .and(PHYSICAL_FLOW.SPECIFICATION_ID.eq(options.entityReference().id()))
                        .and(SelectorUtilities.mkApplicationConditions(options)))
                .union(DSL
                        .select(APPLICATION.ID)
                        .from(PHYSICAL_FLOW)
                        .innerJoin(LOGICAL_FLOW).on(PHYSICAL_FLOW.LOGICAL_FLOW_ID.eq(LOGICAL_FLOW.ID))
                        .innerJoin(APPLICATION).on(LOGICAL_FLOW.TARGET_ENTITY_ID.eq(APPLICATION.ID))
                        .where(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                                .and(PHYSICAL_FLOW.SPECIFICATION_ID.eq(options.entityReference().id()))
                                .and(SelectorUtilities.mkApplicationConditions(options))));
    }

    /**
     * Applications at either end of the logical flow underlying the given physical flow.
     */
    private Select<Record1<Long>> mkForPhysicalFlow(IdSelectionOptions options) {
        return DSL
                .select(APPLICATION.ID)
                .from(PHYSICAL_FLOW)
                .innerJoin(LOGICAL_FLOW).on(PHYSICAL_FLOW.LOGICAL_FLOW_ID.eq(LOGICAL_FLOW.ID))
                .innerJoin(APPLICATION).on(LOGICAL_FLOW.SOURCE_ENTITY_ID.eq(APPLICATION.ID))
                .where(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                        .and(PHYSICAL_FLOW.ID.eq(options.entityReference().id()))
                        .and(SelectorUtilities.mkApplicationConditions(options)))
                .union(DSL
                        .select(APPLICATION.ID)
                        .from(PHYSICAL_FLOW)
                        .innerJoin(LOGICAL_FLOW).on(PHYSICAL_FLOW.LOGICAL_FLOW_ID.eq(LOGICAL_FLOW.ID))
                        .innerJoin(APPLICATION).on(LOGICAL_FLOW.TARGET_ENTITY_ID.eq(APPLICATION.ID))
                        .where(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                                .and(PHYSICAL_FLOW.ID.eq(options.entityReference().id()))
                                .and(SelectorUtilities.mkApplicationConditions(options))));
    }

    /**
     * Applications at either end of the given logical data flow.
     */
    private Select<Record1<Long>> mkForLogicalDataFlow(IdSelectionOptions options) {
        return DSL
                .select(APPLICATION.ID)
                .from(LOGICAL_FLOW)
                .innerJoin(APPLICATION).on(LOGICAL_FLOW.SOURCE_ENTITY_ID.eq(APPLICATION.ID))
                .where(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                        .and(LOGICAL_FLOW.ID.eq(options.entityReference().id()))
                        .and(SelectorUtilities.mkApplicationConditions(options)))
                .union(DSL
                        .select(APPLICATION.ID)
                        .from(LOGICAL_FLOW)
                        .innerJoin(APPLICATION).on(LOGICAL_FLOW.TARGET_ENTITY_ID.eq(APPLICATION.ID))
                        .where(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                                .and(LOGICAL_FLOW.ID.eq(options.entityReference().id()))
                                .and(SelectorUtilities.mkApplicationConditions(options))));
    }

    /**
     * Applications rated against any measurable that is a descendant (via the entity
     * hierarchy) of a measurable referenced by the given process diagram.
     * Requires an EXACT scope.
     */
    private Select<Record1<Long>> mkForProcessDiagram(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        return DSL
                .select(APPLICATION.ID)
                .from(PROCESS_DIAGRAM_ENTITY)
                .innerJoin(ENTITY_HIERARCHY)
                .on(PROCESS_DIAGRAM_ENTITY.ENTITY_ID.eq(ENTITY_HIERARCHY.ANCESTOR_ID)
                        .and(ENTITY_HIERARCHY.KIND.eq(EntityKind.MEASURABLE.name())
                                .and(PROCESS_DIAGRAM_ENTITY.ENTITY_KIND.eq(EntityKind.MEASURABLE.name()))))
                .innerJoin(MEASURABLE_RATING)
                .on(ENTITY_HIERARCHY.ID.eq(MEASURABLE_RATING.MEASURABLE_ID))
                .innerJoin(APPLICATION)
                .on(MEASURABLE_RATING.ENTITY_ID.eq(APPLICATION.ID)
                        .and(MEASURABLE_RATING.ENTITY_KIND.eq(EntityKind.APPLICATION.name())))
                .where(PROCESS_DIAGRAM_ENTITY.DIAGRAM_ID.eq(options.entityReference().id()))
                .and(SelectorUtilities.mkApplicationConditions(options));
    }

    /**
     * Applications recorded as users of the given database (via DATABASE_USAGE).
     */
    private Select<Record1<Long>> mkForDatabase(IdSelectionOptions options) {
        return DSL.select(DATABASE_USAGE.ENTITY_ID)
                .from(DATABASE_USAGE)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(DATABASE_USAGE.ENTITY_ID))
                .where(DATABASE_USAGE.DATABASE_ID.eq(options.entityReference().id()))
                .and(DATABASE_USAGE.ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(SelectorUtilities.mkApplicationConditions(options));
    }

    /**
     * Applications tagged with the given tag (via TAG_USAGE).
     */
    private Select<Record1<Long>> mkForTag(IdSelectionOptions options) {
        return DSL.select(TAG_USAGE.ENTITY_ID)
                .from(TAG_USAGE)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(TAG_USAGE.ENTITY_ID))
                .where(TAG_USAGE.TAG_ID.eq(options.entityReference().id()))
                .and(TAG_USAGE.ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(SelectorUtilities.mkApplicationConditions(options));
    }

    /**
     * Applications using any version of the given software package. EXACT scope only.
     */
    private Select<Record1<Long>> mkForSoftwarePackage(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .selectDistinct(SOFTWARE_USAGE.APPLICATION_ID)
                .from(SOFTWARE_USAGE)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(SOFTWARE_USAGE.APPLICATION_ID))
                .innerJoin(SOFTWARE_VERSION)
                .on(SOFTWARE_VERSION.ID.eq(SOFTWARE_USAGE.SOFTWARE_VERSION_ID))
                .where(SOFTWARE_VERSION.SOFTWARE_PACKAGE_ID.eq(options.entityReference().id()))
                .and(applicationConditions);
    }

    /**
     * Applications using the given specific software version. EXACT scope only.
     */
    private Select<Record1<Long>> mkForSoftwareVersion(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .selectDistinct(SOFTWARE_USAGE.APPLICATION_ID)
                .from(SOFTWARE_USAGE)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(SOFTWARE_USAGE.APPLICATION_ID))
                .where(SOFTWARE_USAGE.SOFTWARE_VERSION_ID.eq(options.entityReference().id()))
                .and(applicationConditions);
    }

    /**
     * Applications using any software version covered by the given licence. EXACT scope only.
     */
    private Select<Record1<Long>> mkForLicence(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .selectDistinct(SOFTWARE_USAGE.APPLICATION_ID)
                .from(SOFTWARE_USAGE)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(SOFTWARE_USAGE.APPLICATION_ID))
                .innerJoin(SOFTWARE_VERSION_LICENCE)
                .on(SOFTWARE_VERSION_LICENCE.SOFTWARE_VERSION_ID.eq(SOFTWARE_USAGE.SOFTWARE_VERSION_ID))
                .where(SOFTWARE_VERSION_LICENCE.LICENCE_ID.eq(options.entityReference().id()))
                .and(applicationConditions);
    }

    /**
     * Applications that are rating items on the given scenario. EXACT scope only.
     */
    private Select<Record1<Long>> mkForScenario(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .selectDistinct(SCENARIO_RATING_ITEM.DOMAIN_ITEM_ID)
                .from(SCENARIO_RATING_ITEM)
                .innerJoin(APPLICATION).on(APPLICATION.ID.eq(SCENARIO_RATING_ITEM.DOMAIN_ITEM_ID))
                .and(SCENARIO_RATING_ITEM.DOMAIN_ITEM_KIND.eq(EntityKind.APPLICATION.name()))
                .where(SCENARIO_RATING_ITEM.SCENARIO_ID.eq(options.entityReference().id()))
                .and(applicationConditions);
    }

    /**
     * Applications recorded as using the given server (via SERVER_USAGE). EXACT scope only.
     */
    private Select<Record1<Long>> mkForServer(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL.selectDistinct(SERVER_USAGE.ENTITY_ID)
                .from(SERVER_USAGE)
                .innerJoin(APPLICATION).on(APPLICATION.ID.eq(SERVER_USAGE.ENTITY_ID))
                .where(SERVER_USAGE.SERVER_ID.eq(options.entityReference().id()))
                .and(SERVER_USAGE.ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(applicationConditions);
    }

    /**
     * Applications exchanging non-removed logical flows with the given actor
     * (apps sending to the actor, unioned with apps receiving from it).
     * EXACT scope only.
     * NOTE(review): static, unlike most sibling builders — presumably reused elsewhere.
     */
    public static Select<Record1<Long>> mkForActor(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        long actorId = options.entityReference().id();
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        Select<Record1<Long>> sourceAppIds = DSL
                .select(logicalFlow.SOURCE_ENTITY_ID)
                .from(logicalFlow)
                .innerJoin(APPLICATION).on(APPLICATION.ID.eq(logicalFlow.SOURCE_ENTITY_ID))
                .where(logicalFlow.TARGET_ENTITY_ID.eq(actorId)
                        .and(logicalFlow.TARGET_ENTITY_KIND.eq(EntityKind.ACTOR.name()))
                        .and(logicalFlow.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                        .and(logicalFlow.ENTITY_LIFECYCLE_STATUS.ne(REMOVED.name())))
                .and(applicationConditions);
        Select<Record1<Long>> targetAppIds = DSL.select(logicalFlow.TARGET_ENTITY_ID)
                .from(logicalFlow)
                .innerJoin(APPLICATION).on(APPLICATION.ID.eq(logicalFlow.TARGET_ENTITY_ID))
                .where(logicalFlow.SOURCE_ENTITY_ID.eq(actorId)
                        .and(logicalFlow.SOURCE_ENTITY_KIND.eq(EntityKind.ACTOR.name()))
                        .and(logicalFlow.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                        .and(logicalFlow.ENTITY_LIFECYCLE_STATUS.ne(REMOVED.name())))
                .and(applicationConditions);
        return DSL.selectFrom(sourceAppIds
                .union(targetAppIds).asTable());
    }

    /**
     * Applications referenced by the given flow diagram: directly placed on the
     * diagram, plus sources and targets of logical flows shown on it.
     * EXACT scope only.
     */
    private Select<Record1<Long>> mkForFlowDiagram(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        long diagramId = options.entityReference().id();
        // Logical flows that appear on the diagram.
        Condition logicalFlowInClause = LOGICAL_FLOW.ID.in(DSL
                .select(FLOW_DIAGRAM_ENTITY.ENTITY_ID)
                .from(FLOW_DIAGRAM_ENTITY)
                .where(FLOW_DIAGRAM_ENTITY.ENTITY_KIND.eq(EntityKind.LOGICAL_DATA_FLOW.name())
                        .and(FLOW_DIAGRAM_ENTITY.DIAGRAM_ID.eq(diagramId))));
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        SelectConditionStep<Record1<Long>> directlyReferencedApps = DSL
                .select(flowDiagram.ENTITY_ID)
                .from(flowDiagram)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(flowDiagram.ENTITY_ID))
                .where(flowDiagram.DIAGRAM_ID.eq(diagramId))
                .and(flowDiagram.ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(applicationConditions);
        SelectConditionStep<Record1<Long>> appsViaSourcesOfFlows = DSL
                .select(LOGICAL_FLOW.SOURCE_ENTITY_ID)
                .from(LOGICAL_FLOW)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(LOGICAL_FLOW.SOURCE_ENTITY_ID))
                .where(logicalFlowInClause)
                .and(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(applicationConditions);
        SelectConditionStep<Record1<Long>> appsViaTargetsOfFlows = DSL
                .select(LOGICAL_FLOW.TARGET_ENTITY_ID)
                .from(LOGICAL_FLOW)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(LOGICAL_FLOW.TARGET_ENTITY_ID))
                .where(logicalFlowInClause)
                .and(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(applicationConditions);
        return DSL.selectFrom(
                directlyReferencedApps
                        .unionAll(appsViaSourcesOfFlows)
                        .unionAll(appsViaTargetsOfFlows).asTable());
    }

    /**
     * Applications rated against any measurable resolved by the delegated
     * measurable id selector (honours the options' scope).
     */
    private Select<Record1<Long>> mkForMeasurable(IdSelectionOptions options) {
        Select<Record1<Long>> measurableSelector = measurableIdSelectorFactory.apply(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .select(measurableRating.ENTITY_ID)
                .from(measurableRating)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(measurableRating.ENTITY_ID))
                .where(measurableRating.ENTITY_KIND.eq(DSL.val(EntityKind.APPLICATION.name())))
                .and(measurableRating.MEASURABLE_ID.in(measurableSelector))
                .and(applicationConditions);
    }

    /**
     * Trivial selector: just the referenced application's own id. EXACT scope only.
     */
    private Select<Record1<Long>> mkForApplication(IdSelectionOptions options) {
        checkTrue(options.scope() == EXACT, "Can only create selector for exact matches if given an APPLICATION ref");
        return DSL.select(DSL.val(options.entityReference().id()));
    }

    /**
     * Applications related to the referenced entity via ENTITY_RELATIONSHIP,
     * in either direction (app as side A unioned with app as side B).
     * EXACT scope only.
     */
    private Select<Record1<Long>> mkForEntityRelationship(IdSelectionOptions options) {
        SelectorUtilities.ensureScopeIsExact(options);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        Select<Record1<Long>> appToEntity = DSL.selectDistinct(ENTITY_RELATIONSHIP.ID_A)
                .from(ENTITY_RELATIONSHIP)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(ENTITY_RELATIONSHIP.ID_A))
                .where(ENTITY_RELATIONSHIP.KIND_A.eq(EntityKind.APPLICATION.name()))
                .and(ENTITY_RELATIONSHIP.KIND_B.eq(options.entityReference().kind().name()))
                .and(ENTITY_RELATIONSHIP.ID_B.eq(options.entityReference().id()))
                .and(applicationConditions);
        Select<Record1<Long>> entityToApp = DSL.selectDistinct(ENTITY_RELATIONSHIP.ID_B)
                .from(ENTITY_RELATIONSHIP)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(ENTITY_RELATIONSHIP.ID_B))
                .where(ENTITY_RELATIONSHIP.KIND_B.eq(EntityKind.APPLICATION.name()))
                .and(ENTITY_RELATIONSHIP.KIND_A.eq(options.entityReference().kind().name()))
                .and(ENTITY_RELATIONSHIP.ID_A.eq(options.entityReference().id()))
                .and(applicationConditions);
        return appToEntity
                .union(entityToApp);
    }

    /**
     * Applications owned by any org unit resolved by the delegated org-unit id
     * selector (so CHILDREN scope expands down the OU hierarchy).
     */
    private SelectConditionStep<Record1<Long>> mkForOrgUnit(IdSelectionOptions options) {
        ImmutableIdSelectionOptions ouSelectorOptions = ImmutableIdSelectionOptions.builder()
                .entityReference(options.entityReference())
                .scope(options.scope())
                .build();
        Select<Record1<Long>> ouSelector = orgUnitIdSelectorFactory.apply(ouSelectorOptions);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .selectDistinct(APPLICATION.ID)
                .from(APPLICATION)
                .where(APPLICATION.ORGANISATIONAL_UNIT_ID.in(ouSelector))
                .and(applicationConditions);
    }

    /**
     * Applications in the given app group: direct entries plus applications whose
     * org unit falls under any OU associated with the group (via the entity
     * hierarchy). App groups are flat, so only EXACT scope is accepted.
     *
     * @throws UnsupportedOperationException if the scope is not EXACT
     */
    public static SelectOrderByStep<Record1<Long>> mkForAppGroup(IdSelectionOptions options) {
        if (options.scope() != EXACT) {
            throw new UnsupportedOperationException(
                    "App Groups are not hierarchical therefore ignoring requested scope of: " + options.scope());
        }
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        // OUs linked to the group, expanded to include their hierarchical descendants.
        SelectConditionStep<Record1<Long>> associatedOrgUnits = DSL
                .selectDistinct(ENTITY_HIERARCHY.ID)
                .from(APPLICATION_GROUP_OU_ENTRY)
                .innerJoin(ENTITY_HIERARCHY)
                .on(ENTITY_HIERARCHY.ANCESTOR_ID.eq(APPLICATION_GROUP_OU_ENTRY.ORG_UNIT_ID)
                        .and(ENTITY_HIERARCHY.KIND.eq(DSL.val(EntityKind.ORG_UNIT.name()))))
                .where(APPLICATION_GROUP_OU_ENTRY.GROUP_ID.eq(options.entityReference().id()));
        SelectConditionStep<Record1<Long>> applicationIdsFromAssociatedOrgUnits = DSL
                .select(APPLICATION.ID)
                .from(APPLICATION)
                .innerJoin(ORGANISATIONAL_UNIT)
                .on(APPLICATION.ORGANISATIONAL_UNIT_ID.eq(ORGANISATIONAL_UNIT.ID))
                .where(ORGANISATIONAL_UNIT.ID.in(associatedOrgUnits));
        SelectConditionStep<Record1<Long>> directApps = DSL
                .select(APPLICATION_GROUP_ENTRY.APPLICATION_ID)
                .from(APPLICATION_GROUP_ENTRY)
                .where(APPLICATION_GROUP_ENTRY.GROUP_ID.eq(options.entityReference().id()));
        SelectWhereStep<Record1<Long>> appIds = DSL
                .selectFrom(directApps
                        .union(applicationIdsFromAssociatedOrgUnits)
                        .asTable());
        return DSL
                .select(APPLICATION.ID)
                .from(APPLICATION)
                .where(APPLICATION.ID.in(appIds))
                // .where(APPLICATION.ID.in(directApps).or(APPLICATION.ID.in(applicationIdsFromAssociatedOrgUnits)))
                .and(applicationConditions);
    }

    /**
     * Applications involving a person: EXACT = that person only,
     * CHILDREN = the person plus their reportees; other scopes are unsupported.
     *
     * @throws UnsupportedOperationException for unsupported scopes
     */
    private Select<Record1<Long>> mkForPerson(IdSelectionOptions options) {
        switch (options.scope()) {
            case EXACT:
                return mkForSinglePerson(options);
            case CHILDREN:
                return mkForPersonReportees(options);
            default:
                throw new UnsupportedOperationException(
                        "Querying for appIds of person using (scope: '"
                                + options.scope()
                                + "') not supported");
        }
    }

    /**
     * Applications with involvements held by the given person or anyone in their
     * reporting line (via PERSON_HIERARCHY).
     */
    private Select<Record1<Long>> mkForPersonReportees(IdSelectionOptions options) {
        // Employee id of the referenced person.
        Select<Record1<String>> emp = DSL
                .select(person.EMPLOYEE_ID)
                .from(person)
                .where(person.ID.eq(options.entityReference().id()));
        SelectConditionStep<Record1<String>> reporteeIds = DSL
                .selectDistinct(personHierarchy.EMPLOYEE_ID)
                .from(personHierarchy)
                .where(personHierarchy.MANAGER_ID.eq(emp));
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        Condition condition = involvement.ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                .and(involvement.EMPLOYEE_ID.eq(emp)
                        .or(involvement.EMPLOYEE_ID.in(reporteeIds)))
                .and(applicationConditions);
        return DSL
                .selectDistinct(involvement.ENTITY_ID)
                .from(involvement)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(involvement.ENTITY_ID))
                .where(condition);
    }

    /**
     * Applications with involvements held directly by the given person.
     */
    private Select<Record1<Long>> mkForSinglePerson(IdSelectionOptions options) {
        Select<Record1<String>> employeeId = DSL
                .select(person.EMPLOYEE_ID)
                .from(person)
                .where(person.ID.eq(options.entityReference().id()));
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        return DSL
                .selectDistinct(involvement.ENTITY_ID)
                .from(involvement)
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(involvement.ENTITY_ID))
                .where(involvement.ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                .and(involvement.EMPLOYEE_ID.eq(employeeId))
                .and(applicationConditions);
    }

    /**
     * Applications at either end of logical flows decorated with any data type
     * resolved by the delegated data-type id selector. Source-side and
     * target-side app ids are aliased to a common "app_id" column and unioned.
     */
    private Select<Record1<Long>> mkForDataType(IdSelectionOptions options) {
        Select<Record1<Long>> dataTypeSelector = dataTypeIdSelectorFactory.apply(options);
        Condition condition = LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID.in(dataTypeSelector)
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_KIND.eq(EntityKind.DATA_TYPE.name()));
        Field<Long> appId = DSL.field("app_id", Long.class);
        Condition applicationConditions = SelectorUtilities.mkApplicationConditions(options);
        SelectConditionStep<Record1<Long>> sources = selectLogicalFlowAppsByDataType(
                LOGICAL_FLOW.SOURCE_ENTITY_ID.as(appId),
                condition
                        .and(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                        .and(applicationConditions),
                LOGICAL_FLOW.SOURCE_ENTITY_ID);
        SelectConditionStep<Record1<Long>> targets = selectLogicalFlowAppsByDataType(
                LOGICAL_FLOW.TARGET_ENTITY_ID.as(appId),
                condition
                        .and(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name()))
                        .and(applicationConditions),
                LOGICAL_FLOW.TARGET_ENTITY_ID);
        return DSL
                .selectDistinct(appId)
                .from(sources.union(targets).asTable());
    }

    /**
     * Shared helper for {@link #mkForDataType}: selects one side of decorated,
     * non-removed logical flows joined to APPLICATION.
     *
     * @param appField  aliased field to project (source or target entity id)
     * @param condition decorator/kind/application filter conditions
     * @param joinField un-aliased field used for the APPLICATION join
     */
    private SelectConditionStep<Record1<Long>> selectLogicalFlowAppsByDataType(Field<Long> appField, Condition condition, Field<Long> joinField) {
        return DSL
                .select(appField)
                .from(LOGICAL_FLOW)
                .innerJoin(LOGICAL_FLOW_DECORATOR)
                .on(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID.eq(LOGICAL_FLOW.ID))
                .innerJoin(APPLICATION)
                .on(APPLICATION.ID.eq(joinField))
                .where(condition)
                .and(LogicalFlowDao.LOGICAL_NOT_REMOVED);
    }
}
|
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.08.07 at 06:17:52 PM CEST
//
package org.w3._1998.math.mathml;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.*;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
* <p>Java class for mpadded.type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="mpadded.type">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <group ref="{http://www.w3.org/1998/Math/MathML}Presentation-expr.class" maxOccurs="unbounded" minOccurs="0"/>
* <attGroup ref="{http://www.w3.org/1998/Math/MathML}mpadded.attlist"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "mpadded.type", propOrder = {
"misAndMosAndMns"
})
@XmlRootElement(name = "mpadded")
public class Mpadded {
@XmlElementRefs({
@XmlElementRef(name = "menclose", namespace = "http://www.w3.org/1998/Math/MathML", type = Menclose.class, required = false),
@XmlElementRef(name = "interval", namespace = "http://www.w3.org/1998/Math/MathML", type = Interval.class, required = false),
@XmlElementRef(name = "tan", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "piecewise", namespace = "http://www.w3.org/1998/Math/MathML", type = Piecewise.class, required = false),
@XmlElementRef(name = "ceiling", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "factorof", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "arcsin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mover", namespace = "http://www.w3.org/1998/Math/MathML", type = Mover.class, required = false),
@XmlElementRef(name = "variance", namespace = "http://www.w3.org/1998/Math/MathML", type = Variance.class, required = false),
@XmlElementRef(name = "msubsup", namespace = "http://www.w3.org/1998/Math/MathML", type = Msubsup.class, required = false),
@XmlElementRef(name = "product", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mode", namespace = "http://www.w3.org/1998/Math/MathML", type = Mode.class, required = false),
@XmlElementRef(name = "mtext", namespace = "http://www.w3.org/1998/Math/MathML", type = Mtext.class, required = false),
@XmlElementRef(name = "power", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "divergence", namespace = "http://www.w3.org/1998/Math/MathML", type = Divergence.class, required = false),
@XmlElementRef(name = "matrix", namespace = "http://www.w3.org/1998/Math/MathML", type = Matrix.class, required = false),
@XmlElementRef(name = "logbase", namespace = "http://www.w3.org/1998/Math/MathML", type = Logbase.class, required = false),
@XmlElementRef(name = "degree", namespace = "http://www.w3.org/1998/Math/MathML", type = Degree.class, required = false),
@XmlElementRef(name = "outerproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Outerproduct.class, required = false),
@XmlElementRef(name = "mfenced", namespace = "http://www.w3.org/1998/Math/MathML", type = Mfenced.class, required = false),
@XmlElementRef(name = "prsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = Prsubset.class, required = false),
@XmlElementRef(name = "grad", namespace = "http://www.w3.org/1998/Math/MathML", type = Grad.class, required = false),
@XmlElementRef(name = "vectorproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Vectorproduct.class, required = false),
@XmlElementRef(name = "plus", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mphantom", namespace = "http://www.w3.org/1998/Math/MathML", type = Mphantom.class, required = false),
@XmlElementRef(name = "msup", namespace = "http://www.w3.org/1998/Math/MathML", type = Msup.class, required = false),
@XmlElementRef(name = "emptyset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "notprsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = Notprsubset.class, required = false),
@XmlElementRef(name = "card", namespace = "http://www.w3.org/1998/Math/MathML", type = Card.class, required = false),
@XmlElementRef(name = "mstyle", namespace = "http://www.w3.org/1998/Math/MathML", type = Mstyle.class, required = false),
@XmlElementRef(name = "bvar", namespace = "http://www.w3.org/1998/Math/MathML", type = Bvar.class, required = false),
@XmlElementRef(name = "cos", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "min", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "partialdiff", namespace = "http://www.w3.org/1998/Math/MathML", type = Partialdiff.class, required = false),
@XmlElementRef(name = "real", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "msub", namespace = "http://www.w3.org/1998/Math/MathML", type = Msub.class, required = false),
@XmlElementRef(name = "lcm", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mroot", namespace = "http://www.w3.org/1998/Math/MathML", type = Mroot.class, required = false),
@XmlElementRef(name = "domainofapplication", namespace = "http://www.w3.org/1998/Math/MathML", type = Domainofapplication.class, required = false),
@XmlElementRef(name = "laplacian", namespace = "http://www.w3.org/1998/Math/MathML", type = Laplacian.class, required = false),
@XmlElementRef(name = "or", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "declare", namespace = "http://www.w3.org/1998/Math/MathML", type = Declare.class, required = false),
@XmlElementRef(name = "leq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "cot", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "cartesianproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Cartesianproduct.class, required = false),
@XmlElementRef(name = "arcsec", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "csc", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "approx", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "gt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "exponentiale", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "union", namespace = "http://www.w3.org/1998/Math/MathML", type = Union.class, required = false),
@XmlElementRef(name = "image", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "exp", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "arcsinh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "rationals", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "in", namespace = "http://www.w3.org/1998/Math/MathML", type = In.class, required = false),
@XmlElementRef(name = "tanh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "divide", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "condition", namespace = "http://www.w3.org/1998/Math/MathML", type = Condition.class, required = false),
@XmlElementRef(name = "arccsch", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mn", namespace = "http://www.w3.org/1998/Math/MathML", type = Mn.class, required = false),
@XmlElementRef(name = "arctanh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "uplimit", namespace = "http://www.w3.org/1998/Math/MathML", type = Uplimit.class, required = false),
@XmlElementRef(name = "naturalnumbers", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "notsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = Notsubset.class, required = false),
@XmlElementRef(name = "intersect", namespace = "http://www.w3.org/1998/Math/MathML", type = Intersect.class, required = false),
@XmlElementRef(name = "imaginaryi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "eq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "cn", namespace = "http://www.w3.org/1998/Math/MathML", type = Cn.class, required = false),
@XmlElementRef(name = "minus", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "true", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "ci", namespace = "http://www.w3.org/1998/Math/MathML", type = Ci.class, required = false),
@XmlElementRef(name = "arg", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "lowlimit", namespace = "http://www.w3.org/1998/Math/MathML", type = Lowlimit.class, required = false),
@XmlElementRef(name = "pi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "ident", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mtable", namespace = "http://www.w3.org/1998/Math/MathML", type = Mtable.class, required = false),
@XmlElementRef(name = "domain", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "malignmark", namespace = "http://www.w3.org/1998/Math/MathML", type = Malignmark.class, required = false),
@XmlElementRef(name = "sum", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "apply", namespace = "http://www.w3.org/1998/Math/MathML", type = Apply.class, required = false),
@XmlElementRef(name = "mpadded", namespace = "http://www.w3.org/1998/Math/MathML", type = Mpadded.class, required = false),
@XmlElementRef(name = "sec", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mo", namespace = "http://www.w3.org/1998/Math/MathML", type = Mo.class, required = false),
@XmlElementRef(name = "momentabout", namespace = "http://www.w3.org/1998/Math/MathML", type = Momentabout.class, required = false),
@XmlElementRef(name = "arctan", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "and", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "root", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "subset", namespace = "http://www.w3.org/1998/Math/MathML", type = Subset.class, required = false),
@XmlElementRef(name = "notin", namespace = "http://www.w3.org/1998/Math/MathML", type = Notin.class, required = false),
@XmlElementRef(name = "coth", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "csch", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "maction", namespace = "http://www.w3.org/1998/Math/MathML", type = Maction.class, required = false),
@XmlElementRef(name = "tendsto", namespace = "http://www.w3.org/1998/Math/MathML", type = Tendsto.class, required = false),
@XmlElementRef(name = "reals", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "false", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "scalarproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Scalarproduct.class, required = false),
@XmlElementRef(name = "inverse", namespace = "http://www.w3.org/1998/Math/MathML", type = Inverse.class, required = false),
@XmlElementRef(name = "integers", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "neq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "infinity", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "lt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "eulergamma", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "times", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "notanumber", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "arccosh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "arccos", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "munder", namespace = "http://www.w3.org/1998/Math/MathML", type = Munder.class, required = false),
@XmlElementRef(name = "forall", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "int", namespace = "http://www.w3.org/1998/Math/MathML", type = Int.class, required = false),
@XmlElementRef(name = "sinh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "codomain", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "setdiff", namespace = "http://www.w3.org/1998/Math/MathML", type = Setdiff.class, required = false),
@XmlElementRef(name = "transpose", namespace = "http://www.w3.org/1998/Math/MathML", type = Transpose.class, required = false),
@XmlElementRef(name = "semantics", namespace = "http://www.w3.org/1998/Math/MathML", type = Semantics.class, required = false),
@XmlElementRef(name = "selector", namespace = "http://www.w3.org/1998/Math/MathML", type = Selector.class, required = false),
@XmlElementRef(name = "sdev", namespace = "http://www.w3.org/1998/Math/MathML", type = Sdev.class, required = false),
@XmlElementRef(name = "arccot", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mi", namespace = "http://www.w3.org/1998/Math/MathML", type = Mi.class, required = false),
@XmlElementRef(name = "implies", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "rem", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "csymbol", namespace = "http://www.w3.org/1998/Math/MathML", type = Csymbol.class, required = false),
@XmlElementRef(name = "complexes", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "ln", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "limit", namespace = "http://www.w3.org/1998/Math/MathML", type = Limit.class, required = false),
@XmlElementRef(name = "sech", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "equivalent", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "arccoth", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "maligngroup", namespace = "http://www.w3.org/1998/Math/MathML", type = Maligngroup.class, required = false),
@XmlElementRef(name = "xor", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "lambda", namespace = "http://www.w3.org/1998/Math/MathML", type = Lambda.class, required = false),
@XmlElementRef(name = "exists", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "vector", namespace = "http://www.w3.org/1998/Math/MathML", type = Vector.class, required = false),
@XmlElementRef(name = "cosh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "list", namespace = "http://www.w3.org/1998/Math/MathML", type = org.w3._1998.math.mathml.List.class, required = false),
@XmlElementRef(name = "curl", namespace = "http://www.w3.org/1998/Math/MathML", type = Curl.class, required = false),
@XmlElementRef(name = "abs", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "msqrt", namespace = "http://www.w3.org/1998/Math/MathML", type = Msqrt.class, required = false),
@XmlElementRef(name = "sin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "geq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mfrac", namespace = "http://www.w3.org/1998/Math/MathML", type = Mfrac.class, required = false),
@XmlElementRef(name = "moment", namespace = "http://www.w3.org/1998/Math/MathML", type = Moment.class, required = false),
@XmlElementRef(name = "max", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "mmultiscripts", namespace = "http://www.w3.org/1998/Math/MathML", type = Mmultiscripts.class, required = false),
@XmlElementRef(name = "quotient", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "gcd", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "ms", namespace = "http://www.w3.org/1998/Math/MathML", type = Ms.class, required = false),
@XmlElementRef(name = "imaginary", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "munderover", namespace = "http://www.w3.org/1998/Math/MathML", type = Munderover.class, required = false),
@XmlElementRef(name = "mean", namespace = "http://www.w3.org/1998/Math/MathML", type = Mean.class, required = false),
@XmlElementRef(name = "compose", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "set", namespace = "http://www.w3.org/1998/Math/MathML", type = Set.class, required = false),
@XmlElementRef(name = "mspace", namespace = "http://www.w3.org/1998/Math/MathML", type = Mspace.class, required = false),
@XmlElementRef(name = "mrow", namespace = "http://www.w3.org/1998/Math/MathML", type = Mrow.class, required = false),
@XmlElementRef(name = "median", namespace = "http://www.w3.org/1998/Math/MathML", type = Median.class, required = false),
@XmlElementRef(name = "primes", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "log", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "floor", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "arccsc", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "determinant", namespace = "http://www.w3.org/1998/Math/MathML", type = Determinant.class, required = false),
@XmlElementRef(name = "not", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "diff", namespace = "http://www.w3.org/1998/Math/MathML", type = Diff.class, required = false),
@XmlElementRef(name = "arcsech", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "conjugate", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false),
@XmlElementRef(name = "merror", namespace = "http://www.w3.org/1998/Math/MathML", type = Merror.class, required = false),
@XmlElementRef(name = "factorial", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false)
})
protected java.util.List<Object> misAndMosAndMns;
    // value of the "width" attribute, kept as the raw unmarshalled string
    @XmlAttribute(name = "width")
    protected String width;
    // value of the "lspace" attribute, kept as the raw unmarshalled string
    @XmlAttribute(name = "lspace")
    protected String lspace;
    // value of the "height" attribute, kept as the raw unmarshalled string
    @XmlAttribute(name = "height")
    protected String height;
    // value of the "depth" attribute, kept as the raw unmarshalled string
    @XmlAttribute(name = "depth")
    protected String depth;
    // "class" attribute tokens (NMTOKENS); named "clazzs" because "class" is a Java keyword
    @XmlAttribute(name = "class")
    @XmlSchemaType(name = "NMTOKENS")
    protected java.util.List<String> clazzs;
    // value of the "style" attribute
    @XmlAttribute(name = "style")
    protected String style;
    // "xref" attribute: IDREF resolved by JAXB to the referenced object
    @XmlAttribute(name = "xref")
    @XmlIDREF
    @XmlSchemaType(name = "IDREF")
    protected Object xref;
    // "id" attribute: document-wide XML ID, whitespace-collapsed by the adapter
    @XmlAttribute(name = "id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    // XLink "href" attribute (anyURI) in the xlink namespace
    @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anyURI")
    protected String href;
    // catch-all for attributes not bound to any typed property of this class
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<>();
/**
* Gets the value of the misAndMosAndMns property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the misAndMosAndMns property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getMisAndMosAndMns().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link Menclose }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Interval }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Piecewise }
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link Mover }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Msubsup }
* {@link Variance }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Mtext }
* {@link Mode }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Matrix }
* {@link Divergence }
* {@link Logbase }
* {@link Outerproduct }
* {@link Degree }
* {@link Mfenced }
* {@link Prsubset }
* {@link Msup }
* {@link Mphantom }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Vectorproduct }
* {@link Grad }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Card }
* {@link Notprsubset }
* {@link Mstyle }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Bvar }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Partialdiff }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Msub }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Domainofapplication }
* {@link Mroot }
* {@link Laplacian }
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link Declare }
* {@link Cartesianproduct }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Union }
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link In }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Condition }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Mn }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Uplimit }
* {@link Notsubset }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Intersect }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link Cn }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Ci }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Lowlimit }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link Mtable }
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link Malignmark }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Apply }
* {@link Mpadded }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Mo }
* {@link Momentabout }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Subset }
* {@link Notin }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Maction }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Tendsto }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Scalarproduct }
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Inverse }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Munder }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link Int }
* {@link Setdiff }
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Transpose }
* {@link Semantics }
* {@link Selector }
* {@link Sdev }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link Mi }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Csymbol }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Limit }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link Maligngroup }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link Lambda }
* {@link org.w3._1998.math.mathml.List }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Vector }
* {@link Curl }
* {@link Msqrt }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link Mfrac }
* {@link Moment }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Mmultiscripts }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Ms }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link Munderover }
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link Mean }
* {@link Set }
* {@link Mspace }
* {@link Mrow }
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link Median }
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Determinant }
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link Diff }
* {@link Merror }
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
*
*
*/
public java.util.List<Object> getMisAndMosAndMns() {
if (misAndMosAndMns == null) {
misAndMosAndMns = new ArrayList<>();
}
return this.misAndMosAndMns;
}
/**
* Gets the value of the width property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getWidth() {
return width;
}
/**
* Sets the value of the width property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setWidth(String value) {
this.width = value;
}
/**
* Gets the value of the lspace property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getLspace() {
return lspace;
}
/**
* Sets the value of the lspace property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setLspace(String value) {
this.lspace = value;
}
/**
* Gets the value of the height property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHeight() {
return height;
}
/**
* Sets the value of the height property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHeight(String value) {
this.height = value;
}
/**
* Gets the value of the depth property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDepth() {
return depth;
}
/**
* Sets the value of the depth property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDepth(String value) {
this.depth = value;
}
/**
* Gets the value of the clazzs property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the clazzs property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getClazzs().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public java.util.List<String> getClazzs() {
if (clazzs == null) {
clazzs = new ArrayList<>();
}
return this.clazzs;
}
/**
* Gets the value of the style property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getStyle() {
return style;
}
/**
* Sets the value of the style property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setStyle(String value) {
this.style = value;
}
/**
* Gets the value of the xref property.
*
* @return
* possible object is
* {@link Object }
*
*/
public Object getXref() {
return xref;
}
/**
* Sets the value of the xref property.
*
* @param value
* allowed object is
* {@link Object }
*
*/
public void setXref(Object value) {
this.xref = value;
}
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setId(String value) {
this.id = value;
}
/**
* Gets the value of the href property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHref() {
return href;
}
/**
* Sets the value of the href property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHref(String value) {
this.href = value;
}
/**
* Gets a map that contains attributes that aren't bound to any typed property on this class.
*
* <p>
* the map is keyed by the name of the attribute and
* the value is the string value of the attribute.
*
* the map returned by this method is live, and you can add new attribute
* by updating the map directly. Because of this design, there's no setter.
*
*
* @return
* always non-null
*/
public Map<QName, String> getOtherAttributes() {
return otherAttributes;
}
}
|
|
/**
Copyright 2017 Andrea "Stock" Stocchero
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pepstock.charba.client.configuration;
import org.pepstock.charba.client.callbacks.ColorCallback;
import org.pepstock.charba.client.callbacks.FontCallback;
import org.pepstock.charba.client.callbacks.NativeCallback;
import org.pepstock.charba.client.callbacks.ScaleContext;
import org.pepstock.charba.client.callbacks.ScriptableFunctions.ProxyBooleanCallback;
import org.pepstock.charba.client.callbacks.ScriptableFunctions.ProxyIntegerCallback;
import org.pepstock.charba.client.callbacks.ScriptableFunctions.ProxyNativeObjectCallback;
import org.pepstock.charba.client.callbacks.ScriptableFunctions.ProxyObjectCallback;
import org.pepstock.charba.client.callbacks.ScriptableFunctions.ProxyStringCallback;
import org.pepstock.charba.client.callbacks.ScriptableIntegerChecker;
import org.pepstock.charba.client.callbacks.ScriptableUtils;
import org.pepstock.charba.client.callbacks.ShowLabelBackdropCallback;
import org.pepstock.charba.client.callbacks.WidthCallback;
import org.pepstock.charba.client.colors.ColorBuilder;
import org.pepstock.charba.client.colors.IsColor;
import org.pepstock.charba.client.commons.CallbackProxy;
import org.pepstock.charba.client.commons.JsHelper;
import org.pepstock.charba.client.commons.Key;
import org.pepstock.charba.client.options.IsScriptableFontProvider;
import org.pepstock.charba.client.options.Ticks;
/**
* Specific tick with minimum and maximum sub ticks.
*
* @author Andrea "Stock" Stocchero
*/
abstract class Tick extends AxisContainer implements IsScriptableFontProvider<ScaleContext> {
    // ---------------------------
    // -- CALLBACKS PROXIES ---
    // ---------------------------
    // callback proxy to invoke the font function from the native layer
    private final CallbackProxy<ProxyNativeObjectCallback> fontCallbackProxy = JsHelper.get().newCallbackProxy();
    // callback proxy to invoke the color function from the native layer
    private final CallbackProxy<ProxyStringCallback> colorCallbackProxy = JsHelper.get().newCallbackProxy();
    // callback proxy to invoke the text stroke color function from the native layer
    private final CallbackProxy<ProxyObjectCallback> textStrokeColorCallbackProxy = JsHelper.get().newCallbackProxy();
    // callback proxy to invoke the text stroke width function from the native layer
    private final CallbackProxy<ProxyIntegerCallback> textStrokeWidthCallbackProxy = JsHelper.get().newCallbackProxy();
    // callback proxy to invoke the backdrop color function from the native layer
    private final CallbackProxy<ProxyObjectCallback> backdropColorCallbackProxy = JsHelper.get().newCallbackProxy();
    // callback proxy to invoke the show label backdrop function from the native layer
    private final CallbackProxy<ProxyBooleanCallback> showLabelBackdropCallbackProxy = JsHelper.get().newCallbackProxy();
    // font callback instance; null until set by user code (see setters, which reset it when a static value is stored)
    private FontCallback<ScaleContext> fontCallback = null;
    // color callback instance; null until set by user code
    private ColorCallback<ScaleContext> colorCallback = null;
    // text stroke color callback instance; null until set by user code
    private ColorCallback<ScaleContext> textStrokeColorCallback = null;
    // text stroke width callback instance; null until set by user code
    private WidthCallback<ScaleContext> textStrokeWidthCallback = null;
    // backdrop color callback instance; null until set by user code
    private ColorCallback<ScaleContext> backdropColorCallback = null;
    // show label backdrop callback instance; null until set by user code
    private ShowLabelBackdropCallback showLabelBackdropCallback = null;
    // major tick sub element, created in the constructor
    private final Major major;
    // font sub element, created in the constructor
    private final Font font;
    // backdrop padding sub element, created in the constructor
    private final Padding backdropPadding;
/**
* Name of properties of native object.
*/
enum Property implements Key
{
CALLBACK("callback"),
COLOR("color"),
FONT("font"),
TEXT_STROKE_COLOR("textStrokeColor"),
TEXT_STROKE_WIDTH("textStrokeWidth"),
BACKDROP_PADDING("backdropPadding"),
BACKDROP_COLOR("backdropColor"),
SHOW_LABEL_BACKDROP("showLabelBackdrop");
// name value of property
private final String value;
/**
* Creates with the property value to use in the native object.
*
* @param value value of property name
*/
private Property(String value) {
this.value = value;
}
/*
* (non-Javadoc)
*
* @see org.pepstock.charba.client.commons.Key#value()
*/
@Override
public String value() {
return value;
}
}
    /**
     * Builds the object storing the axis which this tick belongs to, creating
     * the major/font/padding sub elements and wiring all callback proxies so
     * that the native chart can invoke the user-provided Java callbacks.
     *
     * @param axis axis which this tick belongs to.
     */
    Tick(Axis axis) {
        super(axis);
        // creates sub elements; font and padding read their options lazily through suppliers
        this.major = new Major(axis, this);
        this.font = new Font(this, () -> getConfiguration().getFont());
        this.backdropPadding = new Padding(() -> getAxis().getScale().getTicks().getBackdropPadding());
        // -------------------------------
        // -- SET CALLBACKS to PROXIES ---
        // -------------------------------
        // font: resolves the user font callback, falling back to the default ticks font, and unwraps the native object
        this.fontCallbackProxy.setCallback(context -> ScriptableUtils.getOptionValueAsFont(getAxis().createContext(context), getFontCallback(), getAxis().getDefaultValues().getTicks().getFont()).nativeObject());
        // color: delegates to onColor (defined later in this class; presumably normalizes the callback result — confirm there)
        this.colorCallbackProxy.setCallback(context -> onColor(getAxis().createContext(context), getColorCallback()));
        // text stroke color: same onColor delegation with the text stroke color callback
        this.textStrokeColorCallbackProxy.setCallback(context -> onColor(getAxis().createContext(context), getTextStrokeColorCallback()));
        // text stroke width: resolves the width callback as a number, clamped to positive-or-default, then narrowed to int
        this.textStrokeWidthCallbackProxy.setCallback(
                context -> ScriptableUtils.getOptionValueAsNumber(getAxis().createContext(context), getTextStrokeWidthCallback(), getAxis().getDefaultValues().getTicks().getTextStrokeWidth(), ScriptableIntegerChecker.POSITIVE_OR_DEFAULT).intValue());
        // backdrop color: resolves the color callback, falling back to the default backdrop color string
        this.backdropColorCallbackProxy.setCallback(context -> ScriptableUtils.getOptionValueAsColor(getAxis().createContext(context), getBackdropColorCallback(), getAxis().getDefaultValues().getTicks().getBackdropColorAsString(), false));
        // show label backdrop: resolves the boolean callback, falling back to the default flag
        this.showLabelBackdropCallbackProxy.setCallback(context -> ScriptableUtils.getOptionValue(getAxis().createContext(context), getShowLabelBackdropCallback(), getAxis().getDefaultValues().getTicks().isShowLabelBackdrop()));
    }
/**
* Returns the options element for tick.
*
* @return the configuration
*/
final Ticks getConfiguration() {
return getAxis().getScale().getTicks();
}
/**
* Returns major tick element.
*
* @return the major
*/
public Major getMajor() {
return major;
}
/**
* Returns the font element.
*
* @return the font element
*/
public Font getFont() {
return font;
}
/**
* Returns the padding of label backdrop.
*
* @return padding of label backdrop.
*/
public Padding getBackdropPadding() {
return backdropPadding;
}
/**
* Sets the font color.
*
* @param color font color.
*/
public void setColor(IsColor color) {
setColor(IsColor.checkAndGetValue(color));
}
/**
* Sets the font color.
*
* @param color font color.
*/
public void setColor(String color) {
// resets callback
setColor((ColorCallback<ScaleContext>) null);
// stores the value
getConfiguration().setColor(color);
}
/**
* Returns the font color as string.
*
* @return font color as string
*/
public String getColorAsString() {
return getConfiguration().getColorAsString();
}
/**
* Returns the font color.
*
* @return font color
*/
public IsColor getColor() {
return ColorBuilder.parse(getColorAsString());
}
/**
* If true, show tick marks.
*
* @param display if true, show tick marks
*/
public void setDisplay(boolean display) {
getConfiguration().setDisplay(display);
}
/**
* If true, show tick marks
*
* @return if true, show tick marks.
*/
public boolean isDisplay() {
return getConfiguration().isDisplay();
}
/**
 * Sets the z-index of the tick layer; useful when ticks are drawn on the chart area.<br>
 * Values less than or equal to 0 are drawn under the datasets, values greater than 0 on top.
 *
 * @param z z-index of the tick layer; values less than or equal to 0 are drawn under the datasets, greater than 0 on top
 */
public void setZ(int z) {
	// delegates the storage to the ticks options
	Ticks ticks = getConfiguration();
	ticks.setZ(z);
}
/**
 * Returns the z-index of the tick layer; useful when ticks are drawn on the chart area.<br>
 * Values less than or equal to 0 are drawn under the datasets, values greater than 0 on top.
 *
 * @return z-index of the tick layer
 */
public int getZ() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getZ();
}
/**
 * Sets the text stroke color.
 *
 * @param color the text stroke color
 */
public void setTextStrokeColor(IsColor color) {
	// normalizes the color instance to its string form and delegates
	String colorAsString = IsColor.checkAndGetValue(color);
	setTextStrokeColor(colorAsString);
}
/**
 * Sets the text stroke color.
 *
 * @param color the text stroke color
 */
public void setTextStrokeColor(String color) {
	// a static value overrides any previously set callback
	ColorCallback<ScaleContext> noCallback = null;
	setTextStrokeColor(noCallback);
	// delegates the storage to the ticks options
	getConfiguration().setTextStrokeColor(color);
}
/**
 * Returns the text stroke color, as string.
 *
 * @return the text stroke color, as string
 */
public String getTextStrokeColorAsString() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getTextStrokeColorAsString();
}
/**
 * Returns the text stroke color.
 *
 * @return the text stroke color
 */
public IsColor getTextStrokeColor() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getTextStrokeColor();
}
/**
 * Sets the text stroke width.
 *
 * @param textStrokeWidth the text stroke width
 */
public void setTextStrokeWidth(int textStrokeWidth) {
	// a static value overrides any previously set callback
	WidthCallback<ScaleContext> noCallback = null;
	setTextStrokeWidth(noCallback);
	// delegates the storage to the ticks options
	getConfiguration().setTextStrokeWidth(textStrokeWidth);
}
/**
 * Returns the text stroke width.
 *
 * @return the text stroke width
 */
public int getTextStrokeWidth() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getTextStrokeWidth();
}
/**
 * Sets the padding between the tick label and the axis.<br>
 * On a vertical axis this applies in the horizontal (X) direction; on a horizontal axis, in the vertical (Y) direction.
 *
 * @param padding padding between the tick label and the axis
 */
public void setPadding(int padding) {
	// delegates the storage to the ticks options
	Ticks ticks = getConfiguration();
	ticks.setPadding(padding);
}
/**
 * Returns the padding between the tick label and the axis.<br>
 * On a vertical axis this applies in the horizontal (X) direction; on a horizontal axis, in the vertical (Y) direction.
 *
 * @return padding between the tick label and the axis
 */
public int getPadding() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getPadding();
}
/**
 * Sets the color of the label backdrops.
 *
 * @param backdropColor color of the label backdrops
 */
public void setBackdropColor(IsColor backdropColor) {
	// a static value overrides any previously set callback
	ColorCallback<ScaleContext> noCallback = null;
	setBackdropColor(noCallback);
	// delegates the storage to the ticks options
	getConfiguration().setBackdropColor(backdropColor);
}
/**
 * Sets the color of the label backdrops.
 *
 * @param backdropColor color of the label backdrops
 */
public void setBackdropColor(String backdropColor) {
	// a static value overrides any previously set callback
	ColorCallback<ScaleContext> noCallback = null;
	setBackdropColor(noCallback);
	// delegates the storage to the ticks options
	getConfiguration().setBackdropColor(backdropColor);
}
/**
 * Returns the color of the label backdrops, as string.
 *
 * @return color of the label backdrops, as string
 */
public String getBackdropColorAsString() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getBackdropColorAsString();
}
/**
 * Returns the color of the label backdrops.
 *
 * @return color of the label backdrops
 */
public IsColor getBackdropColor() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.getBackdropColor();
}
/**
 * Sets whether a background is drawn behind the tick labels.
 *
 * @param showLabelBackdrop <code>true</code> to draw a background behind the tick labels
 */
public void setShowLabelBackdrop(boolean showLabelBackdrop) {
	// a static value overrides any previously set callback
	ShowLabelBackdropCallback noCallback = null;
	setShowLabelBackdrop(noCallback);
	// delegates the storage to the ticks options
	getConfiguration().setShowLabelBackdrop(showLabelBackdrop);
}
/**
 * Returns whether a background is drawn behind the tick labels.
 *
 * @return <code>true</code> if a background is drawn behind the tick labels
 */
public boolean isShowLabelBackdrop() {
	// reads the stored value from the ticks options
	Ticks ticks = getConfiguration();
	return ticks.isShowLabelBackdrop();
}
// ---------------------
// CALLBACKS
// ---------------------
/**
 * Returns the font callback, if set, otherwise <code>null</code>.
 *
 * @return the font callback, if set, otherwise <code>null</code>
 */
@Override
public FontCallback<ScaleContext> getFontCallback() {
	return this.fontCallback;
}
/**
 * Returns the color callback, if set, otherwise <code>null</code>.
 *
 * @return the color callback, if set, otherwise <code>null</code>
 */
public ColorCallback<ScaleContext> getColorCallback() {
	return this.colorCallback;
}
/**
 * Returns the text stroke color callback, if set, otherwise <code>null</code>.
 *
 * @return the text stroke color callback, if set, otherwise <code>null</code>
 */
public ColorCallback<ScaleContext> getTextStrokeColorCallback() {
	return this.textStrokeColorCallback;
}
/**
 * Returns the text stroke width callback, if set, otherwise <code>null</code>.
 *
 * @return the text stroke width callback, if set, otherwise <code>null</code>
 */
public WidthCallback<ScaleContext> getTextStrokeWidthCallback() {
	return this.textStrokeWidthCallback;
}
/**
 * Returns the backdrop color callback instance, if set, otherwise <code>null</code>.
 *
 * @return the backdrop color callback instance
 */
public ColorCallback<ScaleContext> getBackdropColorCallback() {
	return this.backdropColorCallback;
}
/**
 * Returns the show label backdrop callback instance, if set, otherwise <code>null</code>.
 *
 * @return the show label backdrop callback instance
 */
public ShowLabelBackdropCallback getShowLabelBackdropCallback() {
	return this.showLabelBackdropCallback;
}
/**
 * Sets the color callback.
 *
 * @param colorCallback the color callback to set
 */
public void setColor(ColorCallback<ScaleContext> colorCallback) {
	// keeps the user callback instance
	this.colorCallback = colorCallback;
	// binds (or unbinds, when null) the proxy in the options
	Ticks ticks = getConfiguration();
	getAxis().setCallback(ticks, Property.COLOR, colorCallback, colorCallbackProxy);
}
/**
 * Sets the color callback, as native callback.
 *
 * @param colorCallback the color callback to set
 */
public void setColor(NativeCallback colorCallback) {
	// clears any java callback previously set
	ColorCallback<ScaleContext> noCallback = null;
	setColor(noCallback);
	// stores the native callback in the options
	getAxis().setCallback(getConfiguration(), Property.COLOR, colorCallback);
}
/**
 * Sets the text stroke color callback.
 *
 * @param textStrokeColorCallback the text stroke color callback to set
 */
public void setTextStrokeColor(ColorCallback<ScaleContext> textStrokeColorCallback) {
	// keeps the user callback instance
	this.textStrokeColorCallback = textStrokeColorCallback;
	// binds (or unbinds, when null) the proxy in the options
	Ticks ticks = getConfiguration();
	getAxis().setCallback(ticks, Property.TEXT_STROKE_COLOR, textStrokeColorCallback, textStrokeColorCallbackProxy);
}
/**
 * Sets the text stroke color callback, as native callback.
 *
 * @param textStrokeColorCallback the text stroke color callback to set
 */
public void setTextStrokeColor(NativeCallback textStrokeColorCallback) {
	// clears any java callback previously set
	ColorCallback<ScaleContext> noCallback = null;
	setTextStrokeColor(noCallback);
	// stores the native callback in the options
	getAxis().setCallback(getConfiguration(), Property.TEXT_STROKE_COLOR, textStrokeColorCallback);
}
/**
 * Sets the text stroke width callback.
 *
 * @param textStrokeWidthCallback the text stroke width callback to set
 */
public void setTextStrokeWidth(WidthCallback<ScaleContext> textStrokeWidthCallback) {
	// keeps the user callback instance
	this.textStrokeWidthCallback = textStrokeWidthCallback;
	// binds (or unbinds, when null) the proxy in the options
	Ticks ticks = getConfiguration();
	getAxis().setCallback(ticks, Property.TEXT_STROKE_WIDTH, textStrokeWidthCallback, textStrokeWidthCallbackProxy);
}
/**
 * Sets the text stroke width callback, as native callback.
 *
 * @param textStrokeWidthCallback the text stroke width callback to set
 */
public void setTextStrokeWidth(NativeCallback textStrokeWidthCallback) {
	// clears any java callback previously set
	WidthCallback<ScaleContext> noCallback = null;
	setTextStrokeWidth(noCallback);
	// stores the native callback in the options
	getAxis().setCallback(getConfiguration(), Property.TEXT_STROKE_WIDTH, textStrokeWidthCallback);
}
/**
 * Sets the font callback.
 *
 * @param fontCallback the font callback to set
 */
@Override
public void setFont(FontCallback<ScaleContext> fontCallback) {
	// keeps the user callback instance
	this.fontCallback = fontCallback;
	// binds (or unbinds, when null) the proxy in the options
	Ticks ticks = getConfiguration();
	getAxis().setCallback(ticks, Property.FONT, fontCallback, fontCallbackProxy);
}
/**
 * Sets the font callback, as native callback.
 *
 * @param fontCallback the font callback to set
 */
@Override
public void setFont(NativeCallback fontCallback) {
	// clears any java callback previously set
	FontCallback<ScaleContext> noCallback = null;
	setFont(noCallback);
	// stores the native callback in the options
	getAxis().setCallback(getConfiguration(), Property.FONT, fontCallback);
}
/**
 * Sets the backdrop color callback instance.
 *
 * @param backdropColorCallback the backdrop color callback instance
 */
public void setBackdropColor(ColorCallback<ScaleContext> backdropColorCallback) {
	// keeps the user callback instance
	this.backdropColorCallback = backdropColorCallback;
	// binds (or unbinds, when null) the proxy in the ticks of the axis configuration
	Ticks ticks = getAxis().getConfiguration().getTicks();
	getAxis().setCallback(ticks, Property.BACKDROP_COLOR, backdropColorCallback, backdropColorCallbackProxy);
}
/**
 * Sets the backdrop color callback instance, as native callback.
 *
 * @param backdropColorCallback the backdrop color callback instance
 */
public void setBackdropColor(NativeCallback backdropColorCallback) {
	// clears any java callback previously set
	ColorCallback<ScaleContext> noCallback = null;
	setBackdropColor(noCallback);
	// stores the native callback in the ticks of the axis configuration
	Ticks ticks = getAxis().getConfiguration().getTicks();
	getAxis().setCallback(ticks, Property.BACKDROP_COLOR, backdropColorCallback);
}
/**
 * Sets the show label backdrop callback instance.
 *
 * @param showLabelBackdropCallback the show label backdrop callback instance
 */
public void setShowLabelBackdrop(ShowLabelBackdropCallback showLabelBackdropCallback) {
	// keeps the user callback instance
	this.showLabelBackdropCallback = showLabelBackdropCallback;
	// binds (or unbinds, when null) the proxy in the ticks of the axis configuration
	Ticks ticks = getAxis().getConfiguration().getTicks();
	getAxis().setCallback(ticks, Property.SHOW_LABEL_BACKDROP, showLabelBackdropCallback, showLabelBackdropCallbackProxy);
}
/**
 * Sets the show label backdrop callback instance, as native callback.
 *
 * @param showLabelBackdropCallback the show label backdrop callback instance
 */
public void setShowLabelBackdrop(NativeCallback showLabelBackdropCallback) {
	// clears any java callback previously set
	ShowLabelBackdropCallback noCallback = null;
	setShowLabelBackdrop(noCallback);
	// stores the native callback in the ticks of the axis configuration
	Ticks ticks = getAxis().getConfiguration().getTicks();
	getAxis().setCallback(ticks, Property.SHOW_LABEL_BACKDROP, showLabelBackdropCallback);
}
// ------------------------------
// internal methods for callback
// ------------------------------
/**
 * Invoked by the proxy to resolve the tick color when a color callback has been activated.
 *
 * @param context scale context of the invocation
 * @param callback user callback to invoke
 * @return the color, as string, to apply; falls back to the default tick color when the callback result is not a string
 */
private String onColor(ScaleContext context, ColorCallback<ScaleContext> callback) {
	// default taken from the axis default values
	final String defaultColor = getAxis().getDefaultValues().getTicks().getColorAsString();
	// invokes the user callback
	Object result = ScriptableUtils.getOptionValueAsColor(context, callback, defaultColor, false);
	// only a string result is usable here; anything else falls back to the default
	return result instanceof String ? (String) result : defaultColor;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.realm;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.util.Map;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.TextInputCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import javax.servlet.http.HttpServletRequest;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.digester.Digester;
/**
* <p>Implementation of the JAAS <strong>LoginModule</strong> interface,
* primarily for use in testing <code>JAASRealm</code>. It utilizes an
* XML-format data file of username/password/role information identical to
* that supported by <code>org.apache.catalina.realm.MemoryRealm</code>
* (except that digested passwords are not supported).</p>
*
* <p>This class recognizes the following string-valued options, which are
* specified in the configuration file (and passed to our constructor in
* the <code>options</code> argument:</p>
* <ul>
* <li><strong>debug</strong> - Set to "true" to get debugging messages
* generated to System.out. The default value is <code>false</code>.</li>
* <li><strong>pathname</strong> - Relative (to the pathname specified by the
* "catalina.base" system property) or absolute pathname to the
* XML file containing our user information, in the format supported by
* {@link MemoryRealm}. The default value matches the MemoryRealm
* default.</li>
* </ul>
*
* <p><strong>IMPLEMENTATION NOTE</strong> - This class implements
* <code>Realm</code> only to satisfy the calling requirements of the
* <code>GenericPrincipal</code> constructor. It does not actually perform
* the functionality required of a <code>Realm</code> implementation.</p>
*
* @author Craig R. McClanahan
*/
public class JAASMemoryLoginModule extends MemoryRealm implements LoginModule {
    // We need to extend MemoryRealm to avoid class cast

    private static final Log log = LogFactory.getLog(JAASMemoryLoginModule.class);

    // ----------------------------------------------------- Instance Variables

    /**
     * The callback handler responsible for answering our requests.
     */
    protected CallbackHandler callbackHandler = null;

    /**
     * Has our own <code>commit()</code> returned successfully?
     */
    protected boolean committed = false;

    /**
     * The configuration information for this <code>LoginModule</code>.
     */
    protected Map<String,?> options = null;

    /**
     * The absolute or relative pathname to the XML configuration file.
     */
    protected String pathname = "conf/tomcat-users.xml";

    /**
     * The <code>Principal</code> identified by our validation, or
     * <code>null</code> if validation failed.
     */
    protected Principal principal = null;

    /**
     * The state information that is shared with other configured
     * <code>LoginModule</code> instances.
     */
    protected Map<String,?> sharedState = null;

    /**
     * The subject for which we are performing authentication.
     */
    protected Subject subject = null;

    // --------------------------------------------------------- Public Methods

    public JAASMemoryLoginModule() {
        log.debug("MEMORY LOGIN MODULE");
    }

    /**
     * Phase 2 of authenticating a <code>Subject</code> when Phase 1
     * fails. This method is called if the <code>LoginContext</code>
     * failed somewhere in the overall authentication chain.
     *
     * @return <code>true</code> if this method succeeded, or
     *  <code>false</code> if this <code>LoginModule</code> should be
     *  ignored
     *
     * @exception LoginException if the abort fails
     */
    @Override
    public boolean abort() throws LoginException {
        // If our authentication was not successful, just return false
        if (principal == null) {
            return false;
        }
        // Clean up if overall authentication failed
        if (committed) {
            logout();
        } else {
            committed = false;
            principal = null;
        }
        log.debug("Abort");
        return true;
    }

    /**
     * Phase 2 of authenticating a <code>Subject</code> when Phase 1
     * was successful. This method is called if the <code>LoginContext</code>
     * succeeded in the overall authentication chain.
     *
     * @return <code>true</code> if the authentication succeeded, or
     *  <code>false</code> if this <code>LoginModule</code> should be
     *  ignored
     *
     * @exception LoginException if the commit fails
     */
    @Override
    public boolean commit() throws LoginException {
        log.debug("commit " + principal);
        // If authentication was not successful, just return false
        if (principal == null) {
            return false;
        }
        // Add our Principal to the Subject if needed
        if (!subject.getPrincipals().contains(principal)) {
            subject.getPrincipals().add(principal);
            // Add the roles as additional subjects as per the contract with the
            // JAASRealm
            if (principal instanceof GenericPrincipal) {
                String roles[] = ((GenericPrincipal) principal).getRoles();
                for (String role : roles) {
                    subject.getPrincipals().add(
                            new GenericPrincipal(role, null, null));
                }
            }
        }
        committed = true;
        return true;
    }

    /**
     * Initialize this <code>LoginModule</code> with the specified
     * configuration information.
     *
     * @param subject The <code>Subject</code> to be authenticated
     * @param callbackHandler A <code>CallbackHandler</code> for communicating
     *  with the end user as necessary
     * @param sharedState State information shared with other
     *  <code>LoginModule</code> instances
     * @param options Configuration information for this specific
     *  <code>LoginModule</code> instance
     */
    @Override
    public void initialize(Subject subject, CallbackHandler callbackHandler,
                           Map<String,?> sharedState, Map<String,?> options) {
        log.debug("Init");

        // Save configuration values
        this.subject = subject;
        this.callbackHandler = callbackHandler;
        this.sharedState = sharedState;
        this.options = options;

        // Perform instance-specific initialization
        if (options.get("pathname") != null) {
            this.pathname = (String) options.get("pathname");
        }

        // Load our defined Principals
        load();
    }

    /**
     * Phase 1 of authenticating a <code>Subject</code>.
     *
     * @return <code>true</code> if the authentication succeeded, or
     *  <code>false</code> if this <code>LoginModule</code> should be
     *  ignored
     *
     * @exception LoginException if the authentication fails
     */
    @Override
    public boolean login() throws LoginException {
        // Set up our CallbackHandler requests
        if (callbackHandler == null) {
            throw new LoginException("No CallbackHandler specified");
        }
        // The callback contract with JAASRealm: username, password and then
        // the DIGEST parameters plus the authentication method in use
        Callback callbacks[] = new Callback[9];
        callbacks[0] = new NameCallback("Username: ");
        callbacks[1] = new PasswordCallback("Password: ", false);
        callbacks[2] = new TextInputCallback("nonce");
        callbacks[3] = new TextInputCallback("nc");
        callbacks[4] = new TextInputCallback("cnonce");
        callbacks[5] = new TextInputCallback("qop");
        callbacks[6] = new TextInputCallback("realmName");
        callbacks[7] = new TextInputCallback("md5a2");
        callbacks[8] = new TextInputCallback("authMethod");

        // Interact with the user to retrieve the username and password
        String username = null;
        String password = null;
        String nonce = null;
        String nc = null;
        String cnonce = null;
        String qop = null;
        String realmName = null;
        String md5a2 = null;
        String authMethod = null;
        try {
            callbackHandler.handle(callbacks);
            username = ((NameCallback) callbacks[0]).getName();
            password =
                new String(((PasswordCallback) callbacks[1]).getPassword());
            nonce = ((TextInputCallback) callbacks[2]).getText();
            nc = ((TextInputCallback) callbacks[3]).getText();
            cnonce = ((TextInputCallback) callbacks[4]).getText();
            qop = ((TextInputCallback) callbacks[5]).getText();
            realmName = ((TextInputCallback) callbacks[6]).getText();
            md5a2 = ((TextInputCallback) callbacks[7]).getText();
            authMethod = ((TextInputCallback) callbacks[8]).getText();
        } catch (IOException | UnsupportedCallbackException e) {
            // LoginException has no (message, cause) constructor, so chain the
            // cause explicitly to preserve the original stack trace
            LoginException le = new LoginException(e.toString());
            le.initCause(e);
            throw le;
        }

        // Validate the username and password we have received
        if (authMethod == null) {
            // BASIC or FORM
            principal = super.authenticate(username, password);
        } else if (authMethod.equals(HttpServletRequest.DIGEST_AUTH)) {
            principal = super.authenticate(username, password, nonce, nc,
                    cnonce, qop, realmName, md5a2);
        } else if (authMethod.equals(HttpServletRequest.CLIENT_CERT_AUTH)) {
            principal = super.getPrincipal(username);
        } else {
            throw new LoginException("Unknown authentication method");
        }

        log.debug("login " + username + " " + principal);

        // Report results based on success or failure
        if (principal != null) {
            return true;
        } else {
            throw new
                FailedLoginException("Username or password is incorrect");
        }
    }

    /**
     * Log out this user.
     *
     * @return <code>true</code> in all cases because the
     *  <code>LoginModule</code> should not be ignored
     *
     * @exception LoginException if logging out failed
     */
    @Override
    public boolean logout() throws LoginException {
        subject.getPrincipals().remove(principal);
        committed = false;
        principal = null;
        return true;
    }

    // ---------------------------------------------------------- Realm Methods
    // ------------------------------------------------------ Protected Methods

    /**
     * Load the contents of our configuration file.
     */
    protected void load() {
        // Validate the existence of our configuration file
        File file = new File(pathname);
        if (!file.isAbsolute()) {
            // Relative pathnames are resolved against Catalina base
            String catalinaBase = getCatalinaBase();
            if (catalinaBase == null) {
                log.warn("Unable to determine Catalina base to load file " + pathname);
                return;
            } else {
                file = new File(catalinaBase, pathname);
            }
        }
        if (!file.exists() || !file.canRead()) {
            log.warn("Cannot load configuration file " + file.getAbsolutePath());
            return;
        }

        // Load the contents of our configuration file
        Digester digester = new Digester();
        digester.setValidating(false);
        digester.addRuleSet(new MemoryRuleSet());
        try {
            digester.push(this);
            digester.parse(file);
        } catch (Exception e) {
            log.warn("Error processing configuration file " +
                    file.getAbsolutePath(), e);
            return;
        } finally {
            digester.reset();
        }
    }

    private String getCatalinaBase() {
        // Have to get this via a callback as that is the only link we have back
        // to the defining Realm. Can't use the system property as that may not
        // be set/correct in an embedded scenario
        if (callbackHandler == null) {
            return null;
        }

        Callback callbacks[] = new Callback[1];
        callbacks[0] = new TextInputCallback("catalinaBase");

        String result = null;

        try {
            callbackHandler.handle(callbacks);
            result = ((TextInputCallback) callbacks[0]).getText();
        } catch (IOException | UnsupportedCallbackException e) {
            return null;
        }

        return result;
    }
}
|
|
package com.tectonica.api.v1;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Map.Entry;
import javax.inject.Inject;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.apphosting.api.ApiProxy;
import com.google.apphosting.api.ApiProxy.Environment;
import com.google.inject.servlet.RequestScoped;
import com.tectonica.engine.MyIntf;
@Path("/info")
@RequestScoped
public class InfoAPI
{
	private static final String TITLE = "Information Page";
	private static final String H1_TITLE = "Information Page";
	private static final String STYLE = createStyle();
	private static final String POWERED_BY_URL = "http://www.tectonica.co.il";
	private static final String POWERED_BY_NAME = "tectonica.co.il";

	@Inject
	private HttpServletRequest request; // the reason for the @RequestScoped

	@Inject
	private MyIntf myImpl;

	/**
	 * Returns environment information (headers, cookies, JVM, OS variables, etc)
	 *
	 * @summary get environment
	 * @return HTML page with environment information
	 */
	@GET
	@Produces("text/html")
	public String getEnvironmentalHtml()
	{
		myImpl.foo(); // makes sure injection succeeded
		StringBuilder sb = new StringBuilder();
		String timeStamp = new Date().toString();
		try
		{
			sb.append("<h1>").append(H1_TITLE).append("</h1>");
			sb.append(timeStamp).append("<hr/>");
			printHeaders(sb);
			printCookies(sb);
			printJvmProperties(sb);
			printGaeProperties(sb);
			printEnvironment(sb);
			sb.append("<p>&nbsp;</p><hr/>");
			sb.append("Powered by <a href='").append(POWERED_BY_URL).append("'>").append(POWERED_BY_NAME).append("</a>");
		}
		catch (Exception e)
		{
			sb.append("<pre style='color:red'>Error:\n").append(e.getMessage()).append("\n").append(timeStamp).append("</pre>");
			e.printStackTrace();
		}
		return createHtml(TITLE, STYLE, sb.toString());
	}

	private static String createStyle()
	{
		StringBuilder sb = new StringBuilder();
		sb.append("article,body,div,figure,form,h1,h2,h3,h4,html,img,label,li,nav,p,span,strong,ul{font-size:100%;vertical-align:baseline;margin:0;padding:0;outline:0;border:0;background:0 0}");
		sb.append("html{overflow-y:scroll;overflow-x:hidden}");
		sb.append("body{font-family:'Proxima Nova Regular','Segoe UI',Roboto,'Droid Sans','Helvetica Neue',Arial,sans-serif;font-style:normal;font-weight:400;padding:10px;overflow:hidden;line-height:1.46em;color:#333}");
		sb.append("h1,h2{font-family:'Skolar Bold','Segoe UI Bold','Roboto Slab','Droid Serif',AvenirNext-Bold,'Avenir Bold',Georgia,'Times New Roman',Times,serif;font-weight:700;font-style:normal}");
		sb.append("h1{font-size:2em;line-height:1.1em;padding-top:.5em}");
		sb.append("h2{font-size:1.5em;line-height:1.3em;padding:1.5em 0 .5em}");
		sb.append("table{font-family:'Lucida Sans Unicode','Lucida Grande',Sans-Serif;font-size:12px;text-align:left;border-collapse:collapse;margin:10px;max-width:1200px}");
		sb.append("th{font-weight:400;font-size:13px;color:#039;background:#b9c9fe;padding:8px}");
		sb.append("td{background:#e8edff;border-top:1px solid #fff;color:#669;padding:8px;word-break:break-all;min-width:250px}");
		sb.append("tbody tr:hover td{background:#d0dafd}");
		return sb.toString();
	}

	private String createHtml(String title, String style, String body)
	{
		return String.format("<html><head><title>%s</title><style>%s</style></head><body>%s</body></html>", title, style, body);
	}

	/**
	 * Minimal HTML-entity encoding for untrusted values (request headers, cookies)
	 * echoed back in the page, to prevent reflected XSS.
	 */
	private static String escapeHtml(String value)
	{
		if (value == null)
			return "";
		return value.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;").replace("\"", "&quot;");
	}

	@SuppressWarnings("unchecked")
	private void printHeaders(StringBuilder sb)
	{
		sb.append("<h2>Headers</h2>");
		Enumeration<String> headerNames = request.getHeaderNames();
		sb.append("<table><tr><th>Name</th><th>Value</th></tr>");
		while (headerNames.hasMoreElements())
		{
			String headerName = headerNames.nextElement();
			// header names and values are client-controlled; escape before echoing
			sb.append("<tr><td>").append(escapeHtml(headerName)).append("</td><td>");
			Enumeration<String> headerValues = request.getHeaders(headerName);
			boolean isFirst = true;
			while (headerValues.hasMoreElements())
			{
				String headerValue = headerValues.nextElement();
				if (!isFirst)
					sb.append("</td></tr><tr><td>&nbsp;</td><td>");
				sb.append(escapeHtml(headerValue));
				isFirst = false;
			}
			sb.append("</td></tr>");
		}
		sb.append("</table>");

		// General details
		sb.append("<p>Protocol = ").append(request.getProtocol()).append("</p>");
		sb.append("<p>Scheme = ").append(request.getScheme()).append("</p>");
		sb.append("<p>isSecure = ").append(request.isSecure()).append("</p>");
		sb.append("<p>Remote Address = ").append(request.getRemoteAddr()).append("</p>");
	}

	private void printCookies(StringBuilder sb)
	{
		sb.append("<h2>Cookies</h2>");
		Cookie[] cookies = request.getCookies();
		if (cookies != null)
		{
			sb.append("<p>").append(cookies.length).append(" cookies found</p>");
			sb.append("<table><tr><th>Name</th><th>Value</th></tr>");
			for (Cookie cookie : cookies)
				sb.append("<tr><td>").append(escapeHtml(cookie.getName())).append("</td><td>").append(escapeHtml(cookie.getValue())).append("</td></tr>");
			// close the table only on the branch that opened it (previously a
			// stray </table> was emitted even when no table was opened)
			sb.append("</table>");
		}
		else
			sb.append("<p>No cookies found</p>");
	}

	private void printJvmProperties(StringBuilder sb)
	{
		sb.append("<h2>JVM Properties</h2>");
		Iterator<Entry<Object, Object>> iter = System.getProperties().entrySet().iterator();
		sb.append("<table><tr><th>Name</th><th>Value</th></tr>");
		while (iter.hasNext())
		{
			Entry<Object, Object> entry = iter.next();
			String paramName = entry.getKey().toString();
			String paramValue = entry.getValue().toString();
			sb.append("<tr><td>").append(paramName).append("</td><td>").append(paramValue).append("</td></tr>");
		}
		sb.append("</table>");
	}

	private void printGaeProperties(StringBuilder sb)
	{
		sb.append("<h2>GAE Properties</h2>");
		Environment env = ApiProxy.getCurrentEnvironment();
		Iterator<Entry<String, Object>> iter = env.getAttributes().entrySet().iterator();
		sb.append("<table><tr><th>Name</th><th>Value</th></tr>");
		sb.append("<tr><td>").append("getAppId()").append("</td><td>").append(env.getAppId()).append("</td></tr>");
		sb.append("<tr><td>").append("getVersionId()").append("</td><td>").append(env.getVersionId()).append("</td></tr>");
		sb.append("<tr><td>").append("getModuleId()").append("</td><td>").append(env.getModuleId()).append("</td></tr>");
		sb.append("<tr><td>").append("getAuthDomain()").append("</td><td>").append(env.getAuthDomain()).append("</td></tr>");
		sb.append("<tr><td>").append("getEmail()").append("</td><td>").append(env.getEmail()).append("</td></tr>");
		UserService userService = UserServiceFactory.getUserService();
		boolean userLoggedIn = userService.isUserLoggedIn();
		sb.append("<tr><td>").append("isUserLoggedIn()").append("</td><td>").append(userLoggedIn).append("</td></tr>");
		if (userLoggedIn)
		{
			String logoutUrl = "Click <a href=\"" + userService.createLogoutURL(request.getRequestURI()) + "\">here</a> to Logout";
			sb.append("<tr><td>").append("createLogoutURL()").append("</td><td>").append(logoutUrl).append("</td></tr>");
		}
		else
		{
			String loginUrl = "Click <a href=\"" + userService.createLoginURL(request.getRequestURI()) + "\">here</a> to Login";
			sb.append("<tr><td>").append("createLoginURL()").append("</td><td>").append(loginUrl).append("</td></tr>");
		}
		while (iter.hasNext())
		{
			Entry<String, Object> entry = iter.next();
			String paramName = entry.getKey();
			String paramValue = entry.getValue().toString();
			sb.append("<tr><td>").append(paramName).append("</td><td>").append(paramValue).append("</td></tr>");
		}
		sb.append("</table>");
	}

	private void printEnvironment(StringBuilder sb)
	{
		sb.append("<h2>Environment Variables</h2>");
		Iterator<Entry<String, String>> iter = System.getenv().entrySet().iterator();
		sb.append("<table><tr><th>Name</th><th>Value</th></tr>");
		while (iter.hasNext())
		{
			Entry<String, String> entry = iter.next();
			String paramName = entry.getKey();
			String paramValue = entry.getValue();
			sb.append("<tr><td>").append(paramName).append("</td><td>").append(paramValue).append("</td></tr>");
		}
		sb.append("</table>");
	}
}
|
|
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.spdy;
import io.netty.buffer.ByteBuf;
import io.netty.util.CharsetUtil;
/**
 * Constants and stateless helper methods shared by the SPDY frame
 * encoder/decoder classes in this package: frame-header layout offsets,
 * control-frame type codes, flag bits, and the zlib dictionaries used to
 * prime the SPDY header-block compressor.
 */
final class SpdyCodecUtil {

    // Byte offsets of the type/flags/length fields within the common
    // 8-byte SPDY frame header.
    static final int SPDY_HEADER_TYPE_OFFSET = 2;
    static final int SPDY_HEADER_FLAGS_OFFSET = 4;
    static final int SPDY_HEADER_LENGTH_OFFSET = 5;
    static final int SPDY_HEADER_SIZE = 8;

    static final int SPDY_MAX_LENGTH = 0xFFFFFF; // Length is a 24-bit field

    // Flag bit carried by DATA frames.
    static final byte SPDY_DATA_FLAG_FIN = 0x01;

    // Control-frame type codes.
    static final int SPDY_SYN_STREAM_FRAME = 1;
    static final int SPDY_SYN_REPLY_FRAME = 2;
    static final int SPDY_RST_STREAM_FRAME = 3;
    static final int SPDY_SETTINGS_FRAME = 4;
    static final int SPDY_NOOP_FRAME = 5;
    static final int SPDY_PING_FRAME = 6;
    static final int SPDY_GOAWAY_FRAME = 7;
    static final int SPDY_HEADERS_FRAME = 8;
    static final int SPDY_WINDOW_UPDATE_FRAME = 9;
    static final int SPDY_CREDENTIAL_FRAME = 10;

    // Flag bits carried by control frames.
    static final byte SPDY_FLAG_FIN = 0x01;
    static final byte SPDY_FLAG_UNIDIRECTIONAL = 0x02;

    // SETTINGS frame flags (frame-level and per-entry).
    static final byte SPDY_SETTINGS_CLEAR = 0x01;
    static final byte SPDY_SETTINGS_PERSIST_VALUE = 0x01;
    static final byte SPDY_SETTINGS_PERSISTED = 0x02;

    static final int SPDY_SETTINGS_MAX_ID = 0xFFFFFF; // ID is a 24-bit field

    static final int SPDY_MAX_NV_LENGTH = 0xFFFF; // Length is a 16-bit field

    // Zlib Dictionary
    // Byte-exact dictionary used to seed the zlib compressor/decompressor for
    // SPDY header blocks (length-prefixed names/values followed by common
    // literal fragments). Presumably the SPDY/3 dictionary, given the separate
    // SPDY2 dictionary below — verify against the protocol draft. Do not edit:
    // any byte change breaks interop with peers using the standard dictionary.
    static final byte[] SPDY_DICT = {
        0x00, 0x00, 0x00, 0x07, 0x6f, 0x70, 0x74, 0x69, // - - - - o p t i
        0x6f, 0x6e, 0x73, 0x00, 0x00, 0x00, 0x04, 0x68, // o n s - - - - h
        0x65, 0x61, 0x64, 0x00, 0x00, 0x00, 0x04, 0x70, // e a d - - - - p
        0x6f, 0x73, 0x74, 0x00, 0x00, 0x00, 0x03, 0x70, // o s t - - - - p
        0x75, 0x74, 0x00, 0x00, 0x00, 0x06, 0x64, 0x65, // u t - - - - d e
        0x6c, 0x65, 0x74, 0x65, 0x00, 0x00, 0x00, 0x05, // l e t e - - - -
        0x74, 0x72, 0x61, 0x63, 0x65, 0x00, 0x00, 0x00, // t r a c e - - -
        0x06, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x00, // - a c c e p t -
        0x00, 0x00, 0x0e, 0x61, 0x63, 0x63, 0x65, 0x70, // - - - a c c e p
        0x74, 0x2d, 0x63, 0x68, 0x61, 0x72, 0x73, 0x65, // t - c h a r s e
        0x74, 0x00, 0x00, 0x00, 0x0f, 0x61, 0x63, 0x63, // t - - - - a c c
        0x65, 0x70, 0x74, 0x2d, 0x65, 0x6e, 0x63, 0x6f, // e p t - e n c o
        0x64, 0x69, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x0f, // d i n g - - - -
        0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x2d, 0x6c, // a c c e p t - l
        0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x00, // a n g u a g e -
        0x00, 0x00, 0x0d, 0x61, 0x63, 0x63, 0x65, 0x70, // - - - a c c e p
        0x74, 0x2d, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x73, // t - r a n g e s
        0x00, 0x00, 0x00, 0x03, 0x61, 0x67, 0x65, 0x00, // - - - - a g e -
        0x00, 0x00, 0x05, 0x61, 0x6c, 0x6c, 0x6f, 0x77, // - - - a l l o w
        0x00, 0x00, 0x00, 0x0d, 0x61, 0x75, 0x74, 0x68, // - - - - a u t h
        0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, // o r i z a t i o
        0x6e, 0x00, 0x00, 0x00, 0x0d, 0x63, 0x61, 0x63, // n - - - - c a c
        0x68, 0x65, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, // h e - c o n t r
        0x6f, 0x6c, 0x00, 0x00, 0x00, 0x0a, 0x63, 0x6f, // o l - - - - c o
        0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, // n n e c t i o n
        0x00, 0x00, 0x00, 0x0c, 0x63, 0x6f, 0x6e, 0x74, // - - - - c o n t
        0x65, 0x6e, 0x74, 0x2d, 0x62, 0x61, 0x73, 0x65, // e n t - b a s e
        0x00, 0x00, 0x00, 0x10, 0x63, 0x6f, 0x6e, 0x74, // - - - - c o n t
        0x65, 0x6e, 0x74, 0x2d, 0x65, 0x6e, 0x63, 0x6f, // e n t - e n c o
        0x64, 0x69, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x10, // d i n g - - - -
        0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2d, // c o n t e n t -
        0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, // l a n g u a g e
        0x00, 0x00, 0x00, 0x0e, 0x63, 0x6f, 0x6e, 0x74, // - - - - c o n t
        0x65, 0x6e, 0x74, 0x2d, 0x6c, 0x65, 0x6e, 0x67, // e n t - l e n g
        0x74, 0x68, 0x00, 0x00, 0x00, 0x10, 0x63, 0x6f, // t h - - - - c o
        0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2d, 0x6c, 0x6f, // n t e n t - l o
        0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x00, 0x00, // c a t i o n - -
        0x00, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, // - - c o n t e n
        0x74, 0x2d, 0x6d, 0x64, 0x35, 0x00, 0x00, 0x00, // t - m d 5 - - -
        0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, // - c o n t e n t
        0x2d, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x00, 0x00, // - r a n g e - -
        0x00, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, // - - c o n t e n
        0x74, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x00, 0x00, // t - t y p e - -
        0x00, 0x04, 0x64, 0x61, 0x74, 0x65, 0x00, 0x00, // - - d a t e - -
        0x00, 0x04, 0x65, 0x74, 0x61, 0x67, 0x00, 0x00, // - - e t a g - -
        0x00, 0x06, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, // - - e x p e c t
        0x00, 0x00, 0x00, 0x07, 0x65, 0x78, 0x70, 0x69, // - - - - e x p i
        0x72, 0x65, 0x73, 0x00, 0x00, 0x00, 0x04, 0x66, // r e s - - - - f
        0x72, 0x6f, 0x6d, 0x00, 0x00, 0x00, 0x04, 0x68, // r o m - - - - h
        0x6f, 0x73, 0x74, 0x00, 0x00, 0x00, 0x08, 0x69, // o s t - - - - i
        0x66, 0x2d, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x00, // f - m a t c h -
        0x00, 0x00, 0x11, 0x69, 0x66, 0x2d, 0x6d, 0x6f, // - - - i f - m o
        0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x2d, 0x73, // d i f i e d - s
        0x69, 0x6e, 0x63, 0x65, 0x00, 0x00, 0x00, 0x0d, // i n c e - - - -
        0x69, 0x66, 0x2d, 0x6e, 0x6f, 0x6e, 0x65, 0x2d, // i f - n o n e -
        0x6d, 0x61, 0x74, 0x63, 0x68, 0x00, 0x00, 0x00, // m a t c h - - -
        0x08, 0x69, 0x66, 0x2d, 0x72, 0x61, 0x6e, 0x67, // - i f - r a n g
        0x65, 0x00, 0x00, 0x00, 0x13, 0x69, 0x66, 0x2d, // e - - - - i f -
        0x75, 0x6e, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, // u n m o d i f i
        0x65, 0x64, 0x2d, 0x73, 0x69, 0x6e, 0x63, 0x65, // e d - s i n c e
        0x00, 0x00, 0x00, 0x0d, 0x6c, 0x61, 0x73, 0x74, // - - - - l a s t
        0x2d, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, // - m o d i f i e
        0x64, 0x00, 0x00, 0x00, 0x08, 0x6c, 0x6f, 0x63, // d - - - - l o c
        0x61, 0x74, 0x69, 0x6f, 0x6e, 0x00, 0x00, 0x00, // a t i o n - - -
        0x0c, 0x6d, 0x61, 0x78, 0x2d, 0x66, 0x6f, 0x72, // - m a x - f o r
        0x77, 0x61, 0x72, 0x64, 0x73, 0x00, 0x00, 0x00, // w a r d s - - -
        0x06, 0x70, 0x72, 0x61, 0x67, 0x6d, 0x61, 0x00, // - p r a g m a -
        0x00, 0x00, 0x12, 0x70, 0x72, 0x6f, 0x78, 0x79, // - - - p r o x y
        0x2d, 0x61, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, // - a u t h e n t
        0x69, 0x63, 0x61, 0x74, 0x65, 0x00, 0x00, 0x00, // i c a t e - - -
        0x13, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2d, 0x61, // - p r o x y - a
        0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, // u t h o r i z a
        0x74, 0x69, 0x6f, 0x6e, 0x00, 0x00, 0x00, 0x05, // t i o n - - - -
        0x72, 0x61, 0x6e, 0x67, 0x65, 0x00, 0x00, 0x00, // r a n g e - - -
        0x07, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x72, // - r e f e r e r
        0x00, 0x00, 0x00, 0x0b, 0x72, 0x65, 0x74, 0x72, // - - - - r e t r
        0x79, 0x2d, 0x61, 0x66, 0x74, 0x65, 0x72, 0x00, // y - a f t e r -
        0x00, 0x00, 0x06, 0x73, 0x65, 0x72, 0x76, 0x65, // - - - s e r v e
        0x72, 0x00, 0x00, 0x00, 0x02, 0x74, 0x65, 0x00, // r - - - - t e -
        0x00, 0x00, 0x07, 0x74, 0x72, 0x61, 0x69, 0x6c, // - - - t r a i l
        0x65, 0x72, 0x00, 0x00, 0x00, 0x11, 0x74, 0x72, // e r - - - - t r
        0x61, 0x6e, 0x73, 0x66, 0x65, 0x72, 0x2d, 0x65, // a n s f e r - e
        0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x00, // n c o d i n g -
        0x00, 0x00, 0x07, 0x75, 0x70, 0x67, 0x72, 0x61, // - - - u p g r a
        0x64, 0x65, 0x00, 0x00, 0x00, 0x0a, 0x75, 0x73, // d e - - - - u s
        0x65, 0x72, 0x2d, 0x61, 0x67, 0x65, 0x6e, 0x74, // e r - a g e n t
        0x00, 0x00, 0x00, 0x04, 0x76, 0x61, 0x72, 0x79, // - - - - v a r y
        0x00, 0x00, 0x00, 0x03, 0x76, 0x69, 0x61, 0x00, // - - - - v i a -
        0x00, 0x00, 0x07, 0x77, 0x61, 0x72, 0x6e, 0x69, // - - - w a r n i
        0x6e, 0x67, 0x00, 0x00, 0x00, 0x10, 0x77, 0x77, // n g - - - - w w
        0x77, 0x2d, 0x61, 0x75, 0x74, 0x68, 0x65, 0x6e, // w - a u t h e n
        0x74, 0x69, 0x63, 0x61, 0x74, 0x65, 0x00, 0x00, // t i c a t e - -
        0x00, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, // - - m e t h o d
        0x00, 0x00, 0x00, 0x03, 0x67, 0x65, 0x74, 0x00, // - - - - g e t -
        0x00, 0x00, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, // - - - s t a t u
        0x73, 0x00, 0x00, 0x00, 0x06, 0x32, 0x30, 0x30, // s - - - - 2 0 0
        0x20, 0x4f, 0x4b, 0x00, 0x00, 0x00, 0x07, 0x76, // - O K - - - - v
        0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x00, 0x00, // e r s i o n - -
        0x00, 0x08, 0x48, 0x54, 0x54, 0x50, 0x2f, 0x31, // - - H T T P - 1
        0x2e, 0x31, 0x00, 0x00, 0x00, 0x03, 0x75, 0x72, // - 1 - - - - u r
        0x6c, 0x00, 0x00, 0x00, 0x06, 0x70, 0x75, 0x62, // l - - - - p u b
        0x6c, 0x69, 0x63, 0x00, 0x00, 0x00, 0x0a, 0x73, // l i c - - - - s
        0x65, 0x74, 0x2d, 0x63, 0x6f, 0x6f, 0x6b, 0x69, // e t - c o o k i
        0x65, 0x00, 0x00, 0x00, 0x0a, 0x6b, 0x65, 0x65, // e - - - - k e e
        0x70, 0x2d, 0x61, 0x6c, 0x69, 0x76, 0x65, 0x00, // p - a l i v e -
        0x00, 0x00, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, // - - - o r i g i
        0x6e, 0x31, 0x30, 0x30, 0x31, 0x30, 0x31, 0x32, // n 1 0 0 1 0 1 2
        0x30, 0x31, 0x32, 0x30, 0x32, 0x32, 0x30, 0x35, // 0 1 2 0 2 2 0 5
        0x32, 0x30, 0x36, 0x33, 0x30, 0x30, 0x33, 0x30, // 2 0 6 3 0 0 3 0
        0x32, 0x33, 0x30, 0x33, 0x33, 0x30, 0x34, 0x33, // 2 3 0 3 3 0 4 3
        0x30, 0x35, 0x33, 0x30, 0x36, 0x33, 0x30, 0x37, // 0 5 3 0 6 3 0 7
        0x34, 0x30, 0x32, 0x34, 0x30, 0x35, 0x34, 0x30, // 4 0 2 4 0 5 4 0
        0x36, 0x34, 0x30, 0x37, 0x34, 0x30, 0x38, 0x34, // 6 4 0 7 4 0 8 4
        0x30, 0x39, 0x34, 0x31, 0x30, 0x34, 0x31, 0x31, // 0 9 4 1 0 4 1 1
        0x34, 0x31, 0x32, 0x34, 0x31, 0x33, 0x34, 0x31, // 4 1 2 4 1 3 4 1
        0x34, 0x34, 0x31, 0x35, 0x34, 0x31, 0x36, 0x34, // 4 4 1 5 4 1 6 4
        0x31, 0x37, 0x35, 0x30, 0x32, 0x35, 0x30, 0x34, // 1 7 5 0 2 5 0 4
        0x35, 0x30, 0x35, 0x32, 0x30, 0x33, 0x20, 0x4e, // 5 0 5 2 0 3 - N
        0x6f, 0x6e, 0x2d, 0x41, 0x75, 0x74, 0x68, 0x6f, // o n - A u t h o
        0x72, 0x69, 0x74, 0x61, 0x74, 0x69, 0x76, 0x65, // r i t a t i v e
        0x20, 0x49, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, // - I n f o r m a
        0x74, 0x69, 0x6f, 0x6e, 0x32, 0x30, 0x34, 0x20, // t i o n 2 0 4 -
        0x4e, 0x6f, 0x20, 0x43, 0x6f, 0x6e, 0x74, 0x65, // N o - C o n t e
        0x6e, 0x74, 0x33, 0x30, 0x31, 0x20, 0x4d, 0x6f, // n t 3 0 1 - M o
        0x76, 0x65, 0x64, 0x20, 0x50, 0x65, 0x72, 0x6d, // v e d - P e r m
        0x61, 0x6e, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x34, // a n e n t l y 4
        0x30, 0x30, 0x20, 0x42, 0x61, 0x64, 0x20, 0x52, // 0 0 - B a d - R
        0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x34, 0x30, // e q u e s t 4 0
        0x31, 0x20, 0x55, 0x6e, 0x61, 0x75, 0x74, 0x68, // 1 - U n a u t h
        0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x34, 0x30, // o r i z e d 4 0
        0x33, 0x20, 0x46, 0x6f, 0x72, 0x62, 0x69, 0x64, // 3 - F o r b i d
        0x64, 0x65, 0x6e, 0x34, 0x30, 0x34, 0x20, 0x4e, // d e n 4 0 4 - N
        0x6f, 0x74, 0x20, 0x46, 0x6f, 0x75, 0x6e, 0x64, // o t - F o u n d
        0x35, 0x30, 0x30, 0x20, 0x49, 0x6e, 0x74, 0x65, // 5 0 0 - I n t e
        0x72, 0x6e, 0x61, 0x6c, 0x20, 0x53, 0x65, 0x72, // r n a l - S e r
        0x76, 0x65, 0x72, 0x20, 0x45, 0x72, 0x72, 0x6f, // v e r - E r r o
        0x72, 0x35, 0x30, 0x31, 0x20, 0x4e, 0x6f, 0x74, // r 5 0 1 - N o t
        0x20, 0x49, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, // - I m p l e m e
        0x6e, 0x74, 0x65, 0x64, 0x35, 0x30, 0x33, 0x20, // n t e d 5 0 3 -
        0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x20, // S e r v i c e -
        0x55, 0x6e, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, // U n a v a i l a
        0x62, 0x6c, 0x65, 0x4a, 0x61, 0x6e, 0x20, 0x46, // b l e J a n - F
        0x65, 0x62, 0x20, 0x4d, 0x61, 0x72, 0x20, 0x41, // e b - M a r - A
        0x70, 0x72, 0x20, 0x4d, 0x61, 0x79, 0x20, 0x4a, // p r - M a y - J
        0x75, 0x6e, 0x20, 0x4a, 0x75, 0x6c, 0x20, 0x41, // u n - J u l - A
        0x75, 0x67, 0x20, 0x53, 0x65, 0x70, 0x74, 0x20, // u g - S e p t -
        0x4f, 0x63, 0x74, 0x20, 0x4e, 0x6f, 0x76, 0x20, // O c t - N o v -
        0x44, 0x65, 0x63, 0x20, 0x30, 0x30, 0x3a, 0x30, // D e c - 0 0 - 0
        0x30, 0x3a, 0x30, 0x30, 0x20, 0x4d, 0x6f, 0x6e, // 0 - 0 0 - M o n
        0x2c, 0x20, 0x54, 0x75, 0x65, 0x2c, 0x20, 0x57, // - - T u e - - W
        0x65, 0x64, 0x2c, 0x20, 0x54, 0x68, 0x75, 0x2c, // e d - - T h u -
        0x20, 0x46, 0x72, 0x69, 0x2c, 0x20, 0x53, 0x61, // - F r i - - S a
        0x74, 0x2c, 0x20, 0x53, 0x75, 0x6e, 0x2c, 0x20, // t - - S u n - -
        0x47, 0x4d, 0x54, 0x63, 0x68, 0x75, 0x6e, 0x6b, // G M T c h u n k
        0x65, 0x64, 0x2c, 0x74, 0x65, 0x78, 0x74, 0x2f, // e d - t e x t -
        0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x69, 0x6d, 0x61, // h t m l - i m a
        0x67, 0x65, 0x2f, 0x70, 0x6e, 0x67, 0x2c, 0x69, // g e - p n g - i
        0x6d, 0x61, 0x67, 0x65, 0x2f, 0x6a, 0x70, 0x67, // m a g e - j p g
        0x2c, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x67, // - i m a g e - g
        0x69, 0x66, 0x2c, 0x61, 0x70, 0x70, 0x6c, 0x69, // i f - a p p l i
        0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x78, // c a t i o n - x
        0x6d, 0x6c, 0x2c, 0x61, 0x70, 0x70, 0x6c, 0x69, // m l - a p p l i
        0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x78, // c a t i o n - x
        0x68, 0x74, 0x6d, 0x6c, 0x2b, 0x78, 0x6d, 0x6c, // h t m l - x m l
        0x2c, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x70, 0x6c, // - t e x t - p l
        0x61, 0x69, 0x6e, 0x2c, 0x74, 0x65, 0x78, 0x74, // a i n - t e x t
        0x2f, 0x6a, 0x61, 0x76, 0x61, 0x73, 0x63, 0x72, // - j a v a s c r
        0x69, 0x70, 0x74, 0x2c, 0x70, 0x75, 0x62, 0x6c, // i p t - p u b l
        0x69, 0x63, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, // i c p r i v a t
        0x65, 0x6d, 0x61, 0x78, 0x2d, 0x61, 0x67, 0x65, // e m a x - a g e
        0x3d, 0x67, 0x7a, 0x69, 0x70, 0x2c, 0x64, 0x65, // - g z i p - d e
        0x66, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x73, 0x64, // f l a t e - s d
        0x63, 0x68, 0x63, 0x68, 0x61, 0x72, 0x73, 0x65, // c h c h a r s e
        0x74, 0x3d, 0x75, 0x74, 0x66, 0x2d, 0x38, 0x63, // t - u t f - 8 c
        0x68, 0x61, 0x72, 0x73, 0x65, 0x74, 0x3d, 0x69, // h a r s e t - i
        0x73, 0x6f, 0x2d, 0x38, 0x38, 0x35, 0x39, 0x2d, // s o - 8 8 5 9 -
        0x31, 0x2c, 0x75, 0x74, 0x66, 0x2d, 0x2c, 0x2a, // 1 - u t f - - -
        0x2c, 0x65, 0x6e, 0x71, 0x3d, 0x30, 0x2e // - e n q - 0 -
    };

    // ASCII source text for the SPDY/2 compression dictionary. Kept as a
    // String and converted to bytes once in the static initializer below.
    private static final String SPDY2_DICT_S =
        "optionsgetheadpostputdeletetraceacceptaccept-charsetaccept-encodingaccept-" +
        "languageauthorizationexpectfromhostif-modified-sinceif-matchif-none-matchi" +
        "f-rangeif-unmodifiedsincemax-forwardsproxy-authorizationrangerefererteuser" +
        "-agent10010120020120220320420520630030130230330430530630740040140240340440" +
        "5406407408409410411412413414415416417500501502503504505accept-rangesageeta" +
        "glocationproxy-authenticatepublicretry-afterservervarywarningwww-authentic" +
        "ateallowcontent-basecontent-encodingcache-controlconnectiondatetrailertran" +
        "sfer-encodingupgradeviawarningcontent-languagecontent-lengthcontent-locati" +
        "oncontent-md5content-rangecontent-typeetagexpireslast-modifiedset-cookieMo" +
        "ndayTuesdayWednesdayThursdayFridaySaturdaySundayJanFebMarAprMayJunJulAugSe" +
        "pOctNovDecchunkedtext/htmlimage/pngimage/jpgimage/gifapplication/xmlapplic" +
        "ation/xhtmltext/plainpublicmax-agecharset=iso-8859-1utf-8gzipdeflateHTTP/1" +
        ".1statusversionurl ";

    // SPDY/2 compression dictionary: the ASCII bytes of SPDY2_DICT_S with the
    // trailing space overwritten by a NUL terminator.
    static final byte[] SPDY2_DICT;
    static {
        byte[] SPDY2_DICT_;
        try {
            SPDY2_DICT_ = SPDY2_DICT_S.getBytes(CharsetUtil.US_ASCII);
            // dictionary is null terminated
            SPDY2_DICT_[SPDY2_DICT_.length - 1] = 0;
        } catch (Exception e) {
            // NOTE(review): getBytes(Charset) declares no checked exceptions, so
            // this branch appears to be purely defensive; a 1-byte fallback
            // dictionary would effectively disable dictionary priming.
            SPDY2_DICT_ = new byte[1];
        }
        SPDY2_DICT = SPDY2_DICT_;
    }

    // Utility class: no instances.
    private SpdyCodecUtil() {
    }

    /**
     * Reads a big-endian unsigned short integer from the buffer.
     */
    static int getUnsignedShort(ByteBuf buf, int offset) {
        return (buf.getByte(offset) & 0xFF) << 8 |
            buf.getByte(offset + 1) & 0xFF;
    }

    /**
     * Reads a big-endian unsigned medium integer from the buffer.
     */
    static int getUnsignedMedium(ByteBuf buf, int offset) {
        return (buf.getByte(offset) & 0xFF) << 16 |
            (buf.getByte(offset + 1) & 0xFF) << 8 |
            buf.getByte(offset + 2) & 0xFF;
    }

    /**
     * Reads a big-endian (31-bit) integer from the buffer.
     */
    static int getUnsignedInt(ByteBuf buf, int offset) {
        // Top bit of the first byte is masked off (0x7F): SPDY stream IDs and
        // similar fields are 31-bit values whose high bit is reserved.
        return (buf.getByte(offset) & 0x7F) << 24 |
            (buf.getByte(offset + 1) & 0xFF) << 16 |
            (buf.getByte(offset + 2) & 0xFF) << 8 |
            buf.getByte(offset + 3) & 0xFF;
    }

    /**
     * Reads a big-endian signed integer from the buffer.
     */
    static int getSignedInt(ByteBuf buf, int offset) {
        return (buf.getByte(offset) & 0xFF) << 24 |
            (buf.getByte(offset + 1) & 0xFF) << 16 |
            (buf.getByte(offset + 2) & 0xFF) << 8 |
            buf.getByte(offset + 3) & 0xFF;
    }

    /**
     * Returns {@code true} if ID is for a server initiated stream or ping.
     */
    static boolean isServerId(int id) {
        // Server initiated streams and pings have even IDs
        return id % 2 == 0;
    }

    /**
     * Validate a SPDY header name.
     *
     * @throws NullPointerException if {@code name} is null
     * @throws IllegalArgumentException if {@code name} is empty, too long,
     *         or contains a NUL or non-ASCII character
     */
    static void validateHeaderName(String name) {
        if (name == null) {
            throw new NullPointerException("name");
        }
        if (name.isEmpty()) {
            throw new IllegalArgumentException(
                "name cannot be length zero");
        }
        // Since name may only contain ascii characters, for valid names
        // name.length() returns the number of bytes when UTF-8 encoded.
        if (name.length() > SPDY_MAX_NV_LENGTH) {
            throw new IllegalArgumentException(
                "name exceeds allowable length: " + name);
        }
        for (int i = 0; i < name.length(); i ++) {
            char c = name.charAt(i);
            if (c == 0) {
                throw new IllegalArgumentException(
                    "name contains null character: " + name);
            }
            if (c > 127) {
                throw new IllegalArgumentException(
                    "name contains non-ascii character: " + name);
            }
        }
    }

    /**
     * Validate a SPDY header value. Does not validate max length.
     *
     * @throws NullPointerException if {@code value} is null
     * @throws IllegalArgumentException if {@code value} contains a NUL character
     */
    static void validateHeaderValue(String value) {
        if (value == null) {
            throw new NullPointerException("value");
        }
        for (int i = 0; i < value.length(); i ++) {
            char c = value.charAt(i);
            if (c == 0) {
                throw new IllegalArgumentException(
                    "value contains null character: " + value);
            }
        }
    }
}
|
|
package com.google.cloud.hadoop.io.bigquery;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.DatasetReference;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationTableCopy;
import com.google.api.services.bigquery.model.JobReference;
import com.google.api.services.bigquery.model.TableReference;
import com.google.cloud.hadoop.util.ApiErrorExtractor;
import com.google.cloud.hadoop.util.HadoopToStringUtil;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.security.GeneralSecurityException;
/**
* An OutputCommitter that commits tables specified in job output dataset in Bigquery. This is
* called before job start, after task completion, job completion, task cancellation, and job
* abortion.
*/
public class BigQueryOutputCommitter
    extends OutputCommitter {
  // Logger.
  protected static final Logger LOG = LoggerFactory.getLogger(BigQueryOutputCommitter.class);

  // Used for specialized handling of various API-defined exceptions.
  // NOTE(review): not read anywhere in this class; kept (with its setter)
  // because it is part of the @VisibleForTesting surface.
  private ApiErrorExtractor errorExtractor = new ApiErrorExtractor();

  // Id of project used to describe the project under which all connector operations occur.
  private String projectId;

  // Fully-qualified id of the temporary table the connector writes into.
  private TableReference tempTableRef;

  // Fully-qualified id of the final destination table we desire the output to go to.
  private TableReference finalTableRef;

  // Wrapper around some Bigquery API methods and convenience methods.
  private BigQueryHelper bigQueryHelper;

  /**
   * Creates a bigquery output committer.
   *
   * @param projectId the job's project id.
   * @param tempTableRef the fully-qualified temp table to write to.
   * @param finalTableRef the fully-qualified destination table on commit.
   * @param configuration the task's configuration
   * @throws IOException on IO Error, including failure to obtain a Bigquery
   *         instance (a GeneralSecurityException is wrapped as IOException).
   */
  public BigQueryOutputCommitter(
      String projectId, TableReference tempTableRef,
      TableReference finalTableRef, Configuration configuration)
      throws IOException {
    this.projectId = projectId;
    this.tempTableRef = tempTableRef;
    this.finalTableRef = finalTableRef;
    // Get Bigquery.
    try {
      BigQueryFactory bigQueryFactory = new BigQueryFactory();
      this.bigQueryHelper = bigQueryFactory.getBigQueryHelper(configuration);
    } catch (GeneralSecurityException e) {
      LOG.error("Could not get Bigquery", e);
      // Preserve the cause so callers can diagnose credential problems.
      throw new IOException("Could not get Bigquery", e);
    }
  }

  /**
   * Creates the temporary dataset that will contain all of the task work tables.
   *
   * @param context the job's context.
   * @throws IOException on IO Error.
   */
  @Override
  public void setupJob(JobContext context)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("setupJob({})", HadoopToStringUtil.toString(context));
    }
    // Create dataset.
    DatasetReference datasetReference = new DatasetReference();
    datasetReference.setProjectId(tempTableRef.getProjectId());
    datasetReference.setDatasetId(tempTableRef.getDatasetId());

    Dataset tempDataset = new Dataset();
    tempDataset.setDatasetReference(datasetReference);

    // Insert dataset into Bigquery.
    Bigquery.Datasets datasets = bigQueryHelper.getRawBigquery().datasets();

    // TODO(user): Maybe allow the dataset to exist already instead of throwing 409 here.
    LOG.debug("Creating temporary dataset '{}' for project '{}'",
        tempTableRef.getDatasetId(), tempTableRef.getProjectId());

    // NB: Even though this "insert" makes it look like we can specify a different projectId than
    // the one which owns the dataset, it actually has to match.
    datasets.insert(tempTableRef.getProjectId(), tempDataset).execute();
  }

  /**
   * Deletes the temporary dataset, including all of the work tables.
   *
   * <p>Deletion failures are logged and swallowed (matching the
   * FileOutputCommitter pattern) because the job itself has already succeeded.
   *
   * @param context the job's context.
   * @throws IOException
   */
  @Override
  public void cleanupJob(JobContext context)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("cleanupJob({})", HadoopToStringUtil.toString(context));
    }
    Bigquery.Datasets datasets = bigQueryHelper.getRawBigquery().datasets();
    // (Fixed) removed an unused local that fetched context.getConfiguration().
    try {
      LOG.debug("cleanupJob: Deleting dataset '{}' from project '{}'",
          tempTableRef.getDatasetId(), tempTableRef.getProjectId());
      datasets.delete(tempTableRef.getProjectId(), tempTableRef.getDatasetId())
          .setDeleteContents(true)
          .execute();
    } catch (IOException e) {
      // Error is swallowed as job has completed successfully and the only failure is deleting
      // temporary data.
      // This matches the FileOutputCommitter pattern.
      LOG.warn("Could not delete dataset. Temporary data not cleaned up.", e);
    }
  }

  /**
   * For cleaning up the job's output after job failure.
   *
   * @param jobContext Context of the job whose output is being written.
   * @param status Final run state of the job, should be JobStatus.KILLED or JobStatus.FAILED.
   * @throws IOException on IO Error.
   */
  public void abortJob(JobContext jobContext, int status)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("abortJob({}, {})", HadoopToStringUtil.toString(jobContext), status);
    }
    cleanupJob(jobContext);
  }

  /**
   * For committing job's output after successful job completion. Note that this is invoked for jobs
   * with final run state as JobStatus.SUCCEEDED.
   *
   * @param jobContext Context of the job whose output is being written.
   * @throws IOException on IO Error.
   */
  @Override
  public void commitJob(JobContext jobContext)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("commitJob({})", HadoopToStringUtil.toString(jobContext));
    }
    cleanupJob(jobContext);
  }

  /**
   * No task setup required.
   *
   * @throws IOException on IO Error.
   */
  @Override
  public void setupTask(TaskAttemptContext context)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("setupTask({})", HadoopToStringUtil.toString(context));
    }
    // BigQueryOutputCommitter's setupTask doesn't do anything. Because the
    // temporary task table is created on demand when the
    // task is writing.
  }

  /**
   * Moves the files from the working dataset to the job output table.
   *
   * <p>Issues a Bigquery table-copy job (WRITE_APPEND) from the temporary
   * table into the final table and blocks until it completes.
   *
   * @param context the task context.
   * @throws IOException on IO Error, or if polling for job completion is interrupted.
   */
  @Override
  public void commitTask(TaskAttemptContext context)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("commitTask({})", HadoopToStringUtil.toString(context));
    }

    // Create a table copy request object.
    JobConfigurationTableCopy copyTableConfig = new JobConfigurationTableCopy();

    // Set the table to get results from.
    copyTableConfig.setSourceTable(tempTableRef);

    // Set the table to put results into.
    copyTableConfig.setDestinationTable(finalTableRef);

    copyTableConfig.setWriteDisposition("WRITE_APPEND");

    JobConfiguration config = new JobConfiguration();
    config.setCopy(copyTableConfig);

    // Job id is derived from the task attempt id so retries of the same
    // attempt map to the same (deduplicated) Bigquery job.
    JobReference jobReference = bigQueryHelper.createJobReference(
        projectId, context.getTaskAttemptID().toString());

    Job job = new Job();
    job.setConfiguration(config);
    job.setJobReference(jobReference);

    // Run the job.
    LOG.debug("commitTask: Running table copy from {} to {}",
        BigQueryStrings.toString(tempTableRef), BigQueryStrings.toString(finalTableRef));
    Job response = bigQueryHelper.insertJobOrFetchDuplicate(projectId, job);
    LOG.debug("Got response '{}'", response);

    // Poll until job is complete.
    try {
      BigQueryUtils.waitForJobCompletion(
          bigQueryHelper.getRawBigquery(), projectId, jobReference, context);
    } catch (InterruptedException e) {
      LOG.error("Could not check if results of task were transfered.", e);
      throw new IOException("Could not check if results of task were transfered.", e);
    }
    LOG.info("Saved output of task to table '{}' using project '{}'",
        BigQueryStrings.toString(finalTableRef), projectId);
  }

  /**
   * Deletes the work table.
   *
   * @param context the task's context.
   */
  @Override
  public void abortTask(TaskAttemptContext context) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("abortTask({})", HadoopToStringUtil.toString(context));
    }
    // Cleanup of per-task temporary tables will be performed at job cleanup time.
  }

  /**
   * Did this task write any files into the working dataset?
   *
   * @param context the task's context.
   * @throws IOException on IO Error.
   */
  @Override
  public boolean needsTaskCommit(TaskAttemptContext context)
      throws IOException {
    return needsTaskCommit(context.getTaskAttemptID());
  }

  /**
   * Did this task write any files into the working dataset?
   *
   * <p>Commit is needed iff the task's temporary table exists.
   *
   * @param attemptId the task's context.
   * @throws IOException on IO Error.
   */
  @VisibleForTesting
  public boolean needsTaskCommit(TaskAttemptID attemptId) throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("needsTaskCommit({}) - tempTableRef: '{}'",
          attemptId,
          BigQueryStrings.toString(tempTableRef));
    }
    boolean tableExists = bigQueryHelper.tableExists(tempTableRef);
    LOG.debug("needsTaskCommit -> {}", tableExists);
    return tableExists;
  }

  /**
   * Sets Bigquery for testing purposes.
   */
  @VisibleForTesting
  void setBigQueryHelper(BigQueryHelper helper) {
    this.bigQueryHelper = helper;
  }

  /**
   * Sets the error extractor for testing purposes.
   */
  @VisibleForTesting
  void setErrorExtractor(ApiErrorExtractor errorExtractor) {
    this.errorExtractor = errorExtractor;
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.charfilter;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CharFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.UnicodeUtil;
public class TestMappingCharFilter extends BaseTokenStreamTestCase {
NormalizeCharMap normMap;
@Override
public void setUp() throws Exception {
    super.setUp();
    // Build the normalization table shared by every test in this class.
    // The trailing comment on each rule shows input length -> output length.
    final NormalizeCharMap.Builder b = new NormalizeCharMap.Builder();
    b.add("aa", "a");      // 2 -> 1
    b.add("bbb", "b");     // 3 -> 1
    b.add("cccc", "cc");   // 4 -> 2
    b.add("h", "i");       // 1 -> 1
    b.add("j", "jj");      // 1 -> 2
    b.add("k", "kkk");     // 1 -> 3
    b.add("ll", "llll");   // 2 -> 4
    b.add("empty", "");    // 5 -> 0
    // Non-BMP key: U+1D122 (musical F clef) is stored as a surrogate pair.
    b.add(UnicodeUtil.newString(new int[] {0x1D122}, 0, 1), "fclef");
    b.add("\uff01", "full-width-exclamation");
    normMap = b.build();
}
public void testReaderReset() throws Exception {
    final CharFilter cs = new MappingCharFilter(normMap, new StringReader("x"));
    final char[] buf = new char[10];
    // First pass: exactly one unmapped character comes through, then EOF.
    int len = cs.read(buf, 0, 10);
    assertEquals(1, len);
    assertEquals('x', buf[0]);
    assertEquals(-1, cs.read(buf, 0, 10));
    // After reset() the filter must replay the input from the start.
    cs.reset();
    len = cs.read(buf, 0, 10);
    assertEquals(1, len);
    assertEquals('x', buf[0]);
}
/**
 * Runs {@code input} through one MappingCharFilter and asserts that the
 * whitespace tokenizer yields exactly one token {@code expected}, whose
 * corrected offsets span the whole original (pre-mapping) input.
 */
private void assertSingleMappedToken(String input, String expected) throws Exception {
    CharFilter cs = new MappingCharFilter(normMap, new StringReader(input));
    TokenStream ts = whitespaceMockTokenizer(cs);
    assertTokenStreamContents(ts,
        new String[] { expected },
        new int[] { 0 },
        new int[] { input.length() },
        input.length());
}

public void testNothingChange() throws Exception {
    // No rule matches "x"; it must pass through unchanged.
    assertSingleMappedToken("x", "x");
}

public void test1to1() throws Exception {
    assertSingleMappedToken("h", "i");
}

public void test1to2() throws Exception {
    assertSingleMappedToken("j", "jj");
}

public void test1to3() throws Exception {
    assertSingleMappedToken("k", "kkk");
}

public void test2to4() throws Exception {
    assertSingleMappedToken("ll", "llll");
}

public void test2to1() throws Exception {
    assertSingleMappedToken("aa", "a");
}

public void test3to1() throws Exception {
    assertSingleMappedToken("bbb", "b");
}

public void test4to2() throws Exception {
    assertSingleMappedToken("cccc", "cc");
}

public void test5to0() throws Exception {
    // "empty" maps to the empty string, so no token is produced at all,
    // but the final offset must still cover the 5 consumed input chars.
    CharFilter cs = new MappingCharFilter(normMap, new StringReader("empty"));
    TokenStream ts = whitespaceMockTokenizer(cs);
    assertTokenStreamContents(ts, new String[0], new int[] {}, new int[] {}, 5);
}

public void testNonBMPChar() throws Exception {
    // U+1D122 occupies two chars (a surrogate pair) in the input String.
    assertSingleMappedToken(UnicodeUtil.newString(new int[] {0x1D122}, 0, 1), "fclef");
}

public void testFullWidthChar() throws Exception {
    assertSingleMappedToken("\uff01", "full-width-exclamation");
}
//
// 1111111111222
// 01234567890123456789012
//(in) h i j k ll cccc bbb aa
//
// 1111111111222
// 01234567890123456789012
//(out) i i jj kkk llll cc b a
//
// h, 0, 1 => i, 0, 1
// i, 2, 3 => i, 2, 3
// j, 4, 5 => jj, 4, 5
// k, 6, 7 => kkk, 6, 7
// ll, 8,10 => llll, 8,10
// cccc,11,15 => cc,11,15
// bbb,16,19 => b,16,19
// aa,20,22 => a,20,22
//
public void testTokenStream() throws Exception {
    // See the offset diagram above: start/end offsets always refer to
    // positions in the ORIGINAL (pre-mapping) text.
    final String input = "h i j k ll cccc bbb aa";
    TokenStream ts = whitespaceMockTokenizer(
        new MappingCharFilter(normMap, new StringReader(input)));
    assertTokenStreamContents(ts,
        new String[] { "i", "i", "jj", "kkk", "llll", "cc", "b", "a" },
        new int[] { 0, 2, 4, 6, 8, 11, 16, 20 },
        new int[] { 1, 3, 5, 7, 10, 15, 19, 22 },
        input.length());
}
//
//
// 0123456789
//(in) aaaa ll h
//(out-1) aa llll i
//(out-2) a llllllll i
//
// aaaa,0,4 => a,0,4
// ll,5,7 => llllllll,5,7
// h,8,9 => i,8,9
public void testChained() throws Exception {
    // Two stacked filters: every rule is applied twice (ll -> llll -> llllllll),
    // yet offsets still point into the original text (see diagram above).
    final String input = "aaaa ll h";
    CharFilter inner = new MappingCharFilter(normMap, new StringReader(input));
    TokenStream ts = whitespaceMockTokenizer(new MappingCharFilter(normMap, inner));
    assertTokenStreamContents(ts,
        new String[] { "a", "llllllll", "i" },
        new int[] { 0, 5, 8 },
        new int[] { 4, 7, 9 },
        input.length());
}
/**
 * Feeds a large volume of random text through the shared mapping to
 * smoke-test the filter (offset correction, surrogate handling) against
 * a whitespace tokenizer.
 */
public void testRandom() throws Exception {
    Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
            return new TokenStreamComponents(tokenizer, tokenizer);
        }
        @Override
        protected Reader initReader(String fieldName, Reader reader) {
            // Apply the mapping built in setUp() before tokenization.
            return new MappingCharFilter(normMap, reader);
        }
    };
    int numRounds = RANDOM_MULTIPLIER * 10000;
    checkRandomData(random(), analyzer, numRounds);
}
//@Ignore("wrong finalOffset: https://issues.apache.org/jira/browse/LUCENE-3971")
public void testFinalOffsetSpecialCase() throws Exception {
final NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
builder.add("t", "");
// even though this below rule has no effect, the test passes if you remove it!!
builder.add("tmakdbl", "c");
final NormalizeCharMap map = builder.build();
Analyzer analyzer = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer, tokenizer);
}
@Override
protected Reader initReader(String fieldName, Reader reader) {
return new MappingCharFilter(map, reader);
}
};
String text = "gzw f quaxot";
checkAnalysisConsistency(random(), analyzer, false, text);
}
//@Ignore("wrong finalOffset: https://issues.apache.org/jira/browse/LUCENE-3971")
public void testRandomMaps() throws Exception {
int numIterations = atLeast(3);
for (int i = 0; i < numIterations; i++) {
final NormalizeCharMap map = randomMap();
Analyzer analyzer = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer, tokenizer);
}
@Override
protected Reader initReader(String fieldName, Reader reader) {
return new MappingCharFilter(map, reader);
}
};
int numRounds = 100;
checkRandomData(random(), analyzer, numRounds);
}
}
private NormalizeCharMap randomMap() {
Random random = random();
NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
// we can't add duplicate keys, or NormalizeCharMap gets angry
Set<String> keys = new HashSet<>();
int num = random.nextInt(5);
//System.out.println("NormalizeCharMap=");
for (int i = 0; i < num; i++) {
String key = TestUtil.randomSimpleString(random);
if (!keys.contains(key) && key.length() != 0) {
String value = TestUtil.randomSimpleString(random);
builder.add(key, value);
keys.add(key);
//System.out.println("mapping: '" + key + "' => '" + value + "'");
}
}
return builder.build();
}
  /**
   * End-to-end randomized test: builds a random NormalizeCharMap, applies it to
   * random content both via MappingCharFilter and via a brute-force reference
   * implementation below, then verifies that the produced text and the corrected
   * offsets agree.
   */
  public void testRandomMaps2() throws Exception {
    final Random random = random();
    final int numIterations = atLeast(3);
    for(int iter=0;iter<numIterations;iter++) {
      if (VERBOSE) {
        System.out.println("\nTEST iter=" + iter);
      }

      // Restrict the alphabet to 'a'..endLetter so random keys actually occur in
      // the random content with useful probability.
      final char endLetter = (char) TestUtil.nextInt(random, 'b', 'z');

      final Map<String,String> map = new HashMap<>();
      final NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
      final int numMappings = atLeast(5);
      if (VERBOSE) {
        System.out.println(" mappings:");
      }
      while (map.size() < numMappings) {
        final String key = TestUtil.randomSimpleStringRange(random, 'a', endLetter, 7);
        if (key.length() != 0 && !map.containsKey(key)) {
          final String value = TestUtil.randomSimpleString(random);
          map.put(key, value);
          builder.add(key, value);
          if (VERBOSE) {
            System.out.println(" " + key + " -> " + value);
          }
        }
      }
      final NormalizeCharMap charMap = builder.build();

      if (VERBOSE) {
        System.out.println(" test random documents...");
      }

      for(int iter2=0;iter2<100;iter2++) {
        final String content = TestUtil.randomSimpleStringRange(random, 'a', endLetter, atLeast(1000));
        if (VERBOSE) {
          System.out.println(" content=" + content);
        }

        // Do stupid dog-slow mapping:

        // Output string:
        final StringBuilder output = new StringBuilder();

        // Maps output offset to input offset:
        final List<Integer> inputOffsets = new ArrayList<>();

        // cumDiff tracks how many more input chars than output chars have been
        // consumed so far (input position = output position + cumDiff).
        int cumDiff = 0;
        int charIdx = 0;
        while(charIdx < content.length()) {

          int matchLen = -1;
          String matchRepl = null;

          // Reference matcher: scan every rule and keep the longest key that
          // matches at charIdx (the same longest-match rule the filter uses).
          for(Map.Entry<String,String> ent : map.entrySet()) {
            final String match = ent.getKey();
            if (charIdx + match.length() <= content.length()) {
              final int limit = charIdx+match.length();
              boolean matches = true;
              for(int charIdx2=charIdx;charIdx2<limit;charIdx2++) {
                if (match.charAt(charIdx2-charIdx) != content.charAt(charIdx2)) {
                  matches = false;
                  break;
                }
              }
              if (matches) {
                final String repl = ent.getValue();
                if (match.length() > matchLen) {
                  // Greedy: longer match wins
                  matchLen = match.length();
                  matchRepl = repl;
                }
              }
            }
          }

          if (matchLen != -1) {
            // We found a match here!
            if (VERBOSE) {
              System.out.println(" match=" + content.substring(charIdx, charIdx+matchLen) + " @ off=" + charIdx + " repl=" + matchRepl);
            }
            output.append(matchRepl);
            final int minLen = Math.min(matchLen, matchRepl.length());

            // Common part, directly maps back to input
            // offset:
            for(int outIdx=0;outIdx<minLen;outIdx++) {
              inputOffsets.add(output.length() - matchRepl.length() + outIdx + cumDiff);
            }

            cumDiff += matchLen - matchRepl.length();
            charIdx += matchLen;

            if (matchRepl.length() < matchLen) {
              // Replacement string is shorter than matched
              // input: nothing to do
            } else if (matchRepl.length() > matchLen) {
              // Replacement string is longer than matched
              // input: for all the "extra" chars we map
              // back to a single input offset:
              for(int outIdx=matchLen;outIdx<matchRepl.length();outIdx++) {
                inputOffsets.add(output.length() + cumDiff - 1);
              }
            } else {
              // Same length: no change to offset
            }

            // Invariant: one recorded input offset per output char.
            assert inputOffsets.size() == output.length(): "inputOffsets.size()=" + inputOffsets.size() + " vs output.length()=" + output.length();
          } else {
            // No rule matched: copy the char through; offset maps 1:1 (plus cumDiff).
            inputOffsets.add(output.length() + cumDiff);
            output.append(content.charAt(charIdx));
            charIdx++;
          }
        }

        final String expected = output.toString();
        if (VERBOSE) {
          System.out.print(" expected:");
          for(int charIdx2=0;charIdx2<expected.length();charIdx2++) {
            System.out.print(" " + expected.charAt(charIdx2) + "/" + inputOffsets.get(charIdx2));
          }
          System.out.println();
        }

        final MappingCharFilter mapFilter = new MappingCharFilter(charMap, new StringReader(content));

        final StringBuilder actualBuilder = new StringBuilder();
        final List<Integer> actualInputOffsets = new ArrayList<>();

        // Now consume the actual mapFilter, somewhat randomly:
        while (true) {
          if (random.nextBoolean()) {
            // Single-character read:
            final int ch = mapFilter.read();
            if (ch == -1) {
              break;
            }
            actualBuilder.append((char) ch);
          } else {
            // Block read into a random slice of a randomly sized buffer:
            final char[] buffer = new char[TestUtil.nextInt(random, 1, 100)];
            final int off = buffer.length == 1 ? 0 : random.nextInt(buffer.length-1);
            final int count = mapFilter.read(buffer, off, buffer.length-off);
            if (count == -1) {
              break;
            } else {
              actualBuilder.append(buffer, off, count);
            }
          }

          if (random.nextInt(10) == 7) {
            // Map offsets
            while(actualInputOffsets.size() < actualBuilder.length()) {
              actualInputOffsets.add(mapFilter.correctOffset(actualInputOffsets.size()));
            }
          }
        }

        // Finish mapping offsets
        while(actualInputOffsets.size() < actualBuilder.length()) {
          actualInputOffsets.add(mapFilter.correctOffset(actualInputOffsets.size()));
        }

        final String actual = actualBuilder.toString();

        // Verify:
        assertEquals(expected, actual);
        assertEquals(inputOffsets, actualInputOffsets);
      }
    }
  }
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.actions;
import com.intellij.execution.Location;
import com.intellij.execution.PsiLocation;
import com.intellij.execution.RunManager;
import com.intellij.execution.RunnerAndConfigurationSettings;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.configurations.ConfigurationTypeUtil;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.junit.RuntimeConfigurationProducer;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.List;
/**
* Context for creating run configurations from a location in the source code.
*
* @see RunConfigurationProducer
*/
public class ConfigurationContext {
  private static final Logger LOG = Logger.getInstance("#com.intellij.execution.actions.ConfigurationContext");

  /** Source code location this context was built from; null when none could be resolved. */
  private final Location<PsiElement> myLocation;
  /** Lazily created configuration; see {@link #getConfiguration()}. */
  private RunnerAndConfigurationSettings myConfiguration;
  /**
   * Cached result of {@link #findExisting()}. The Ref wrapper distinguishes
   * "not computed yet" (field is null) from "computed, nothing found" (empty Ref).
   */
  private Ref<RunnerAndConfigurationSettings> myExistingConfiguration;
  private final Module myModule;
  /** Original runtime configuration from the data context, if any; see {@link #getOriginalConfiguration}. */
  private final RunConfiguration myRuntimeConfiguration;
  private final Component myContextComponent;

  // Declared final: this is a shared constant key used for caching in getFromContext();
  // leaving it a mutable public static field invited accidental reassignment.
  public static final Key<ConfigurationContext> SHARED_CONTEXT = Key.create("SHARED_CONTEXT");

  private List<RuntimeConfigurationProducer> myPreferredProducers;
  private List<ConfigurationFromContext> myConfigurationsFromContext;

  /**
   * Returns the configuration context for the given data context, reusing a context
   * previously cached in the data context when it still points at the same PSI element.
   */
  public static ConfigurationContext getFromContext(DataContext dataContext) {
    final ConfigurationContext context = new ConfigurationContext(dataContext);
    final DataManager dataManager = DataManager.getInstance();
    ConfigurationContext sharedContext = dataManager.loadFromDataContext(dataContext, SHARED_CONTEXT);
    // Reuse the cached context only when both contexts resolve to the same PSI
    // element; otherwise replace the cache with the freshly built context.
    if (sharedContext == null ||
        sharedContext.getLocation() == null ||
        context.getLocation() == null ||
        !Comparing.equal(sharedContext.getLocation().getPsiElement(), context.getLocation().getPsiElement())) {
      sharedContext = context;
      dataManager.saveInDataContext(dataContext, SHARED_CONTEXT, sharedContext);
    }
    return sharedContext;
  }

  private ConfigurationContext(final DataContext dataContext) {
    myRuntimeConfiguration = RunConfiguration.DATA_KEY.getData(dataContext);
    myContextComponent = PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext);
    myModule = LangDataKeys.MODULE.getData(dataContext);
    @SuppressWarnings({"unchecked"})
    final Location<PsiElement> location = (Location<PsiElement>)Location.DATA_KEY.getData(dataContext);
    if (location != null) {
      // An explicit location in the data context wins over caret/selection lookup.
      myLocation = location;
      return;
    }
    final Project project = CommonDataKeys.PROJECT.getData(dataContext);
    if (project == null) {
      myLocation = null;
      return;
    }
    final PsiElement element = getSelectedPsiElement(dataContext, project);
    if (element == null) {
      myLocation = null;
      return;
    }
    myLocation = new PsiLocation<PsiElement>(project, myModule, element);
  }

  /**
   * Returns the configuration created from this context.
   *
   * @return the configuration, or null if none of the producers were able to create a configuration from this context.
   */
  @Nullable
  public RunnerAndConfigurationSettings getConfiguration() {
    if (myConfiguration == null) createConfiguration();
    return myConfiguration;
  }

  private void createConfiguration() {
    // Must only be called once per context; getConfiguration() guards the null check.
    LOG.assertTrue(myConfiguration == null);
    final Location location = getLocation();
    myConfiguration = location != null ?
        PreferredProducerFind.createConfiguration(location, this) :
        null;
  }

  public void setConfiguration(RunnerAndConfigurationSettings configuration) {
    myConfiguration = configuration;
  }

  /** @deprecated part of the legacy {@link RuntimeConfigurationProducer} API. */
  @Deprecated
  @Nullable
  public RunnerAndConfigurationSettings updateConfiguration(final RuntimeConfigurationProducer producer) {
    myConfiguration = producer.getConfiguration();
    return myConfiguration;
  }

  /**
   * Returns the source code location for this context.
   *
   * @return the source code location, or null if no source code fragment is currently selected.
   */
  @Nullable
  public Location getLocation() {
    return myLocation;
  }

  /**
   * Returns the PSI element at caret for this context.
   *
   * @return the PSI element, or null if no source code fragment is currently selected.
   */
  @Nullable
  public PsiElement getPsiLocation() {
    return myLocation != null ? myLocation.getPsiElement() : null;
  }

  /**
   * Finds an existing run configuration matching the context. The result is cached;
   * subsequent calls return the first computation's answer.
   *
   * @return an existing configuration, or null if none was found.
   */
  @Nullable
  public RunnerAndConfigurationSettings findExisting() {
    if (myExistingConfiguration != null) return myExistingConfiguration.get();
    myExistingConfiguration = new Ref<RunnerAndConfigurationSettings>();
    if (myLocation == null) {
      return null;
    }
    final PsiElement psiElement = myLocation.getPsiElement();
    if (!psiElement.isValid()) {
      return null;
    }

    final List<RuntimeConfigurationProducer> producers = findPreferredProducers();
    // First pass: when this context originates from a runtime configuration,
    // look for an existing configuration that is exactly that one.
    if (myRuntimeConfiguration != null) {
      if (producers != null) {
        for (RuntimeConfigurationProducer producer : producers) {
          final RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(myLocation, this);
          if (configuration != null && configuration.getConfiguration() == myRuntimeConfiguration) {
            myExistingConfiguration.set(configuration);
          }
        }
      }
      for (RunConfigurationProducer producer : Extensions.getExtensions(RunConfigurationProducer.EP_NAME)) {
        RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(this);
        if (configuration != null && configuration.getConfiguration() == myRuntimeConfiguration) {
          myExistingConfiguration.set(configuration);
        }
      }
    }
    // Second pass: any existing configuration produced from this location.
    // NOTE(review): Ref.set overwrites, so a match found here replaces a
    // runtime-specific match from the first pass — confirm this priority is intended.
    if (producers != null) {
      for (RuntimeConfigurationProducer producer : producers) {
        final RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(myLocation, this);
        if (configuration != null) {
          myExistingConfiguration.set(configuration);
        }
      }
    }
    for (RunConfigurationProducer producer : Extensions.getExtensions(RunConfigurationProducer.EP_NAME)) {
      RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(this);
      if (configuration != null) {
        myExistingConfiguration.set(configuration);
      }
    }
    return myExistingConfiguration.get();
  }

  /**
   * Resolves the PSI element the user is acting on: caret position in the editor first,
   * then the PSI-element array from the data context, then the first selected virtual file.
   */
  @Nullable
  private static PsiElement getSelectedPsiElement(final DataContext dataContext, final Project project) {
    PsiElement element = null;
    final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
    if (editor != null){
      final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
      if (psiFile != null) {
        final int offset = editor.getCaretModel().getOffset();
        element = psiFile.findElementAt(offset);
        if (element == null && offset > 0 && offset == psiFile.getTextLength()) {
          // Caret at end of file: fall back to the element just before it.
          element = psiFile.findElementAt(offset-1);
        }
      }
    }
    if (element == null) {
      final PsiElement[] elements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataContext);
      element = elements != null && elements.length > 0 ? elements[0] : null;
    }
    if (element == null) {
      final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
      if (files != null && files.length > 0) {
        element = PsiManager.getInstance(project).findFile(files[0]);
      }
    }
    return element;
  }

  public RunManager getRunManager() {
    return RunManager.getInstance(getProject());
  }

  // NOTE(review): throws NPE when myLocation == null — callers appear to guarantee
  // a resolved location before calling this; confirm.
  public Project getProject() {
    return myLocation.getProject();
  }

  public Module getModule() {
    return myModule;
  }

  public DataContext getDataContext() {
    return DataManager.getInstance().getDataContext(myContextComponent);
  }

  /**
   * Returns original {@link RunConfiguration} from this context.
   * For example, it could be some test framework runtime configuration that had been launched
   * and that had brought a result test tree on which a right-click action was performed.
   *
   * @param type {@link ConfigurationType} instance to filter original runtime configuration by its type
   * @return {@link RunConfiguration} instance, it could be null
   */
  @Nullable
  public RunConfiguration getOriginalConfiguration(@Nullable ConfigurationType type) {
    if (type == null) {
      return myRuntimeConfiguration;
    }
    if (myRuntimeConfiguration != null
        && ConfigurationTypeUtil.equals(myRuntimeConfiguration.getType(), type)) {
      return myRuntimeConfiguration;
    }
    return null;
  }

  /** @deprecated part of the legacy {@link RuntimeConfigurationProducer} API. */
  @Deprecated
  @Nullable
  public List<RuntimeConfigurationProducer> findPreferredProducers() {
    if (myPreferredProducers == null) {
      myPreferredProducers = PreferredProducerFind.findPreferredProducers(myLocation, this, true);
    }
    return myPreferredProducers;
  }

  /** Lazily computes and caches the configurations that can be created from this context. */
  public List<ConfigurationFromContext> getConfigurationsFromContext() {
    if (myConfigurationsFromContext == null) {
      myConfigurationsFromContext = PreferredProducerFind.getConfigurationsFromContext(myLocation, this, true);
    }
    return myConfigurationsFromContext;
  }
}
|
|
/*L
* Copyright ScenPro Inc, SAIC-F
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cadsr-cdecurate/LICENSE.txt for details.
*/
// Copyright (c) 2002 ScenPro, Inc.
// $Header: /CVSNT/cdecurate/src/gov/nih/nci/cadsr/cdecurate/tool/EVS_UserBean.java,v 1.15 2007/01/12 21:35:07 shegde
// Exp $
// $Name: not supported by cvs2svn $
package gov.nih.nci.cadsr.cdecurate.tool;
// import gov.nih.nci.evs.domain.Source;
import java.io.Serializable;
import java.util.Hashtable;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
/**
* The UserBean encapsulates the EVS information from caDSR database and will be stored in the session after the user
* has logged on.
* <P>
*
* @author Sumana
* @version 3.0
*/
public final class EVS_UserBeanStub implements Serializable
{
private static final long serialVersionUID = 8161299425314596067L;
private String m_evsConURL; // connection string
private Vector m_vocabNameList; // index name of the vocab
// that are not displayed
private Vector m_vocabDisplayList; // drop down list of names
private String m_vocabName; // vocab name used to query
// cacore api
private String m_vocabDisplay; // vocab name displayed in
// the jsp
private String m_vocabDBOrigin; // vocab name stored in
// cadsr table as origin and
// vocab name in the search
// results
private String m_vocabMetaSource; // Meta source to the
// specific vocabulary
private boolean m_vocabUseParent; // true or false value to
// mark it to be used for
// parent search
private String m_SearchInName; // display term for search
// in of name option
private String m_SearchInConCode; // display term for search
// in of Concept Code option
private String m_SearchInMetaCode; // display term for search
// in of MetaCode option
private String m_NameType; // Vocab type to search
// concept name
private String m_PropName; // evs property for concept
// name attribute
private String m_PropNameDisp; // evs property for concept
// name attribute
private String m_PropDefinition; // evs property for
// definition attribute
private String m_PropHDSyn; // evs property for header
// concept attribute
private String m_PropRetCon; // evs property for retired
// concept property
private String m_PropSemantic; // evs property for Symantic
// Type property
private String m_retSearch; // retired option for search
// filter
private String m_treeSearch; // tree display option for
// search filter
private String m_includeMeta; // retired option for search
// filter
private String m_codeType; // code types specific to
// each vocab
private String m_defDefaultValue; // definition default value
// if value doesn't exists
private String m_metaDispName; // meta display name used
// for commapring
private String m_dsrDispName; // dsr database display name
// used for commapring
private Vector m_NCIDefSrcList; // list of definition
// sources for NCI in the
// priority order
private Hashtable m_metaCodeType; // code type for meta
// thesaurus with filter
// value
private Hashtable m_vocab_attr; // attributes specific to
// vocabs
private String PrefVocabSrc; // source of the preferred
// vocabulary
private String PrefVocab; // name of the preferred
// vocabulary
private String _vocabAccess;
private static final Logger logger = Logger.getLogger(EVS_UserBeanStub.class);
/**
* Constructor
*/
public EVS_UserBeanStub()
{
logger.debug("here");
}
/**
* The getEVSConURL method returns the evs connection string for this bean.
*
* @return String The connection string
*/
public String getEVSConURL()
{
return m_evsConURL;
}
/**
* The setEVSConURL method sets the evs connection string for this bean.
*
* @param evsURL
* The connection string to set
*/
public void setEVSConURL(String evsURL)
{
m_evsConURL = evsURL;
}
/**
* gets the list of vocabs from the bean
*
* @return m_vocabNameList list of vocabulary names
*/
public Vector getVocabNameList()
{
return m_vocabNameList;
}
/**
* this sets the list of vocab names into the bean stored in database
*
* @param vName
* list of vocab names from the database
*/
public void setVocabNameList(Vector vName)
{
m_vocabNameList = vName;
}
/**
* gets the list of vocabs from the bean
*
* @return m_vocabDisplayList list of vocabulary Display names
*/
public Vector getVocabDisplayList()
{
return m_vocabDisplayList;
}
/**
* this sets the list of vocab Display names into the bean stored in database
*
* @param vDisplay
* list of vocab Display names from the database
*/
public void setVocabDisplayList(Vector vDisplay)
{
m_vocabDisplayList = vDisplay;
}
/**
* The getVocabName method returns the name of the vocab used for quering the cacore api.
*
* @return String The name of the vocab
*/
public String getVocabName()
{
return m_vocabName;
}
/**
* The setVocabName method sets the name of the vocab for querying the cacore api.
*
* @param sName
* The name of the vocab
*/
public void setVocabName(String sName)
{
m_vocabName = sName;
}
/**
* The getVocabDisplay method returns the Display of the vocab used for quering the cacore api.
*
* @return String The Display of the vocab
*/
public String getVocabDisplay()
{
return m_vocabDisplay;
}
/**
* The setVocabDisplay method sets the Display of the vocab for querying the cacore api.
*
* @param sDisplay
* The Display of the vocab
*/
public void setVocabDisplay(String sDisplay)
{
m_vocabDisplay = sDisplay;
}
/**
* The getVocabDBOrigin method returns the DBOrigin of the vocab used for quering the cacore api.
*
* @return String The DBOrigin of the vocab
*/
public String getVocabDBOrigin()
{
return m_vocabDBOrigin;
}
/**
* The setVocabDBOrigin method sets the DBOrigin of the vocab for querying the cacore api.
*
* @param sDBOrigin
* The DBOrigin of the vocab
*/
public void setVocabDBOrigin(String sDBOrigin)
{
m_vocabDBOrigin = sDBOrigin;
}
/**
* The getVocabMetaSource method returns the MetaSource of the vocab used for quering the cacore api.
*
* @return String The MetaSource of the vocab
*/
public String getVocabMetaSource()
{
return m_vocabMetaSource;
}
/**
* The setVocabMetaSource method sets the MetaSource of the vocab for querying the cacore api.
*
* @param sMetaSource
* The MetaSource of the vocab
*/
public void setVocabMetaSource(String sMetaSource)
{
m_vocabMetaSource = sMetaSource;
}
/**
* The getVocabUseParent method returns the the true or false value to use as vocab parent.
*
* @return boolean true or false value The parent use of the vocab
*/
public boolean getVocabUseParent()
{
return m_vocabUseParent;
}
/**
* The setVocabUseParent method sets the true or false value to use as vocab parent.
*
* @param bUseParent
* The True or False value to use the vocab as parent
*/
public void setVocabUseParent(boolean bUseParent)
{
m_vocabUseParent = bUseParent;
}
/**
* gets the display name for the Name option of evs searchin
*
* @return m_SearchInName display name option of evs searchin
*/
public String getSearchInName()
{
return m_SearchInName;
}
/**
* this sets the display name for the Name option of evs searchin into the bean stored in database
*
* @param sData
* the display name for the Name option of evs searchin from the database
*/
public void setSearchInName(String sData)
{
m_SearchInName = sData;
}
/**
* gets the display name for the Concept Code option of evs searchin
*
* @return m_SearchInName display Concept Code option of evs searchin
*/
public String getSearchInConCode()
{
return m_SearchInConCode;
}
/**
* this sets the display name for the Concept Code option of evs searchin into the bean stored in database
*
* @param sData
* the display name for the Concept Code option of evs searchin from the database
*/
public void setSearchInConCode(String sData)
{
m_SearchInConCode = sData;
}
/**
* gets the display name for the Meta Code option of evs searchin
*
* @return m_SearchInName display Meta Code option of evs searchin
*/
public String getSearchInMetaCode()
{
return m_SearchInMetaCode;
}
/**
* this sets the display name for the Meta Code option of evs searchin into the bean stored in database
*
* @param sData
* the display name for the Meta Code option of evs searchin from the database
*/
public void setSearchInMetaCode(String sData)
{
m_SearchInMetaCode = sData;
}
/**
* The getNameType method returns the Type for name search in for the vocab for this bean.
*
* @return String the Type for name search in for the vocab
*/
public String getNameType()
{
return m_NameType;
}
/**
* The setType method sets the Type for name search in for the vocab for this bean.
*
* @param sNType
* the sNType for name search in for the vocab
*/
public void setNameType(String sNType)
{
m_NameType = sNType;
}
/**
* The getPropName method returns the concept property string for concept for this bean.
*
* @return String The property string for concept name
*/
public String getPropName()
{
return m_PropName;
}
/**
* The setPropName method sets the concept property string used to search concept name for this bean.
*
* @param sName
* The property string for concept name to set
*/
public void setPropName(String sName)
{
m_PropName = sName;
}
/**
* The getPropNameDisp method returns the concept name disp property string for concept for this bean.
*
* @return String The property string for concept name disp
*/
public String getPropNameDisp()
{
return m_PropNameDisp;
}
/**
* The setPropNameDisp method sets the concept property string used to search concept name for this bean.
*
* @param sName
* The property string for concept name display to set
*/
public void setPropNameDisp(String sName)
{
m_PropNameDisp = sName;
}
/**
* The getPropDefinition method returns the concept property string for concept for this bean.
*
* @return String The property string for concept Definition
*/
public String getPropDefinition()
{
return m_PropDefinition;
}
/**
* The setPropDefinition method sets the concept property string used to search concept Definition for this bean.
*
* @param sDefinition
* The property string for concept Definition to set
*/
public void setPropDefinition(String sDefinition)
{
m_PropDefinition = sDefinition;
}
/**
* The getPropHDSyn method returns the concept property string for concept for this bean.
*
* @return String The property string for concept header concept
*/
public String getPropHDSyn()
{
return m_PropHDSyn;
}
/**
* The setPropHDSyn method sets the concept property string used to search concept header concept for this bean.
*
* @param sHDSyn
* The property string for concept header concept to set
*/
public void setPropHDSyn(String sHDSyn)
{
m_PropHDSyn = sHDSyn;
}
/**
* The getPropRetCon method returns the concept property string for concept for this bean.
*
* @return String The property string for retired concept
*/
public String getPropRetCon()
{
return m_PropRetCon;
}
/**
* The setPropRetCon method sets the concept property string used to search retired concept for this bean.
*
* @param sRetCon
* The property string for retired concept to set
*/
public void setPropRetCon(String sRetCon)
{
m_PropRetCon = sRetCon;
}
/**
* The getPropSemantic method returns the concept property string for concept for this bean.
*
* @return String The property string for concept Semantic Type
*/
public String getPropSemantic()
{
return m_PropSemantic;
}
/**
* The setPropSemantic method sets the concept property string used to search concept Semantic Type for this bean.
*
* @param sSemantic
* The property string for concept Semantic Type to set
*/
public void setPropSemantic(String sSemantic)
{
m_PropSemantic = sSemantic;
}
/**
* The getRetSearch method returns the RetSearch status for this bean.
*
* @return String Whether this vocab is to display is a RetSearch or not
*/
public String getRetSearch()
{
return m_retSearch;
}
/**
* The setRetSearch method sets the RetSearch status for this bean.
*
* @param isRetSearch
* The RetSearch option for the vocabulary for JSP
*/
public void setRetSearch(String isRetSearch)
{
m_retSearch = isRetSearch;
}
/**
* @return Returns the m_treeSearch.
*/
public String getTreeSearch()
{
return m_treeSearch;
}
/**
* @param search
* The m_treeSearch to set.
*/
public void setTreeSearch(String search)
{
m_treeSearch = search;
}
/**
* The getIncludeMeta method returns the IncludeMeta vocabulary name for this bean.
*
* @return String Whether this vocab is associated with another vocab like Meta thesarus
*/
public String getIncludeMeta()
{
return (m_includeMeta == null) ? "" : m_includeMeta;
}
/**
* The setIncludeMeta method sets the IncludeMeta vocabulary name for this bean.
*
* @param sMetaName
* The Meta vocab name associated with another vocab
*/
public void setIncludeMeta(String sMetaName)
{
m_includeMeta = sMetaName;
}
/**
* The getCode_Type method returns the concept code type (altname type or evs source) specific to the vocabulary.
*
* @return String m_codeType is a string
*/
public String getVocabCodeType()
{
return m_codeType;
}
/**
* stores the vocab code type in the bean
*
* @param sType
* evs source type or altname type of the vocabulary
*/
public void setVocabCodeType(String sType)
{
m_codeType = sType;
}
/**
* @return boolean to mark web access
*/
public boolean vocabIsSecure()
{
return _vocabAccess != null;
}
/**
* @return string code of we access
*/
public String getVocabAccess()
{
return _vocabAccess;
}
/**
* @param code_
* string code of we access
*/
public void setVocabAccess(String code_)
{
_vocabAccess = code_;
}
/**
* The getDefDefaultValue method returns the default definition value.
*
* @return String m_defDefaultValue is a string
*/
public String getDefDefaultValue()
{
return m_defDefaultValue;
}
/**
* stores the default value for the defiinition used if definition from api is empty
*
* @param sDef
* default definition
*/
public void setDefDefaultValue(String sDef)
{
m_defDefaultValue = sDef;
}
/**
* The getMetaDispName method returns the meta thesaurs name display.
*
* @return String m_metaDispName is a string
*/
public String getMetaDispName()
{
return m_metaDispName;
}
/**
* stores the meta name for display
*
* @param sName
* meta name
*/
public void setMetaDispName(String sName)
{
m_metaDispName = sName;
}
/**
* The getDSRDispName method returns the DSR name display.
*
* @return String m_dsrDispName is a string
*/
public String getDSRDispName()
{
return m_dsrDispName;
}
/**
* stores the DSR name for display
*
* @param sName
* DSR name
*/
public void setDSRDispName(String sName)
{
m_dsrDispName = sName;
}
/**
* gets the list of NCI definition sources to filter out
*
* @return m_NCIDefSrcList list of defintion sources
*/
public Vector getNCIDefSrcList()
{
return m_NCIDefSrcList;
}
/**
* this sets the list of NCI definition sources to filter out
*
* @param vName
* list of NCI definition sources
*/
public void setNCIDefSrcList(Vector vName)
{
m_NCIDefSrcList = vName;
}
/**
 * Returns the attributes specific to each vocabulary.
 *
 * @return hashtable keyed by vocabulary name
 */
public Hashtable getVocab_Attr()
{
return m_vocab_attr;
}
/**
 * Stores the vocabulary-specific attributes.
 *
 * @param vocAttr
 *            hashtable with vocabulary names as keys and attribute beans as values
 */
public void setVocab_Attr(Hashtable vocAttr)
{
m_vocab_attr = vocAttr;
}
/**
 * Returns the code types for the meta thesaurus.
 *
 * @return hashtable of code type and filter value
 */
public Hashtable getMetaCodeType()
{
return m_metaCodeType;
}
/**
 * Stores the meta thesaurus code types.
 * (Previous javadoc was a copy-paste of setVocab_Attr's and described the wrong table.)
 *
 * @param metaType
 *            hashtable of code type and filter value
 */
public void setMetaCodeType(Hashtable metaType)
{
m_metaCodeType = metaType;
}
/**
 * @return the preferred vocabulary source
 */
public String getPrefVocabSrc()
{
return PrefVocabSrc;
}
/**
 * Sets the preferred vocabulary source.
 * NOTE(review): the backing fields PrefVocabSrc/PrefVocab use UpperCamelCase,
 * unlike the m_/underscore convention elsewhere in this class.
 *
 * @param prefVocabSrc
 *            the preferred vocabulary source to set
 */
public void setPrefVocabSrc(String prefVocabSrc)
{
PrefVocabSrc = prefVocabSrc;
}
/**
 * @return the preferred vocabulary
 */
public String getPrefVocab()
{
return PrefVocab;
}
/**
 * Sets the preferred vocabulary.
 *
 * @param prefVocab
 *            the preferred vocabulary to set
 */
public void setPrefVocab(String prefVocab)
{
PrefVocab = prefVocab;
}
/**
 * Returns the EVS vocabularies available at the given URL.
 * NOTE(review): stub implementation — only logs and returns null. Confirm
 * whether this is intentionally disabled before relying on it.
 *
 * @param eURL
 *            EVS service URL
 * @return currently always null
 */
public java.util.List getEVSVocabs(String eURL)
{
logger.debug("here");
return null;
}
/**
 * Gets EVS-related data from the tools options table at login instead of
 * hardcoding it.
 * NOTE(review): stub implementation — only logs; no data is loaded.
 *
 * @param req
 *            request object
 * @param res
 *            response object
 * @param servlet
 *            servlet object
 */
public void getEVSInfoFromDSR(HttpServletRequest req, HttpServletResponse res, CurationServlet servlet)
{
logger.debug("here");
}
/**
 * Stores vocabulary attributes on the given user bean.
 * NOTE(review): stub implementation — only logs and returns the bean unchanged.
 *
 * @param vuBean
 *            bean to populate
 * @param vAttr
 *            attribute values
 * @return the (unmodified) bean
 */
public EVS_UserBean storeVocabAttr(EVS_UserBean vuBean, Vector vAttr)
{
logger.debug("here");
return vuBean;
}
}
|
|
package me.doubledutch.pikadb;
import java.io.*;
/**
 * Base class for typed values stored on a page. Each concrete subtype encodes
 * itself as: 1 type-tag byte, a 4-byte oid, then a type-specific payload.
 * Subtypes also provide static readValue/skipValue decoders used by
 * {@link #readVariant(DataInput,ObjectSet)}.
 */
public abstract class Variant implements Comparable<Variant>{
// Wire-format type tags. STOP terminates a page's data; DELETE marks a
// single overwritten byte; SKIP is an in-memory placeholder only.
public final static int STOP=0;
public final static int INTEGER=1;
public final static int FLOAT=2;
public final static int STRING=3;
public final static int DOUBLE=4;
public final static int BOOLEAN=5;
public final static int DELETE=6;
public final static int SKIP=7;
public final static int LONG=8;
public final static int SHORT=9;
public final static int BYTE=10;
// public static Variant skipVariant=new Variant.Skip();
public abstract int getOID();
public abstract int getType();
// Total on-disk footprint in bytes, including tag byte and oid.
public abstract int getSize();
public abstract Object getObjectValue();
public abstract void writeVariant(DataOutput out) throws IOException;
// public abstract void skipValue(DataInput in) throws IOException;
public abstract int compareTo(Variant v);
/**
 * Marks every record on the page whose oid is in the given set as deleted by
 * overlaying its full on-disk footprint with DELETE filler bytes. The change is
 * recorded as a page diff rather than written in place; readVariant later
 * consumes each DELETE byte as a one-byte Skip.
 *
 * @param set oids whose values should be removed
 * @param page page to scan and patch
 * @throws IOException on underlying read failure
 */
public static void deleteValues(ObjectSet set,Page page) throws IOException{
DataInput in=page.getDataInput();
// Byte offset of the variant currently under inspection, from the page start.
int offset=0;
// TODO: we could be skipping our way through it here
Variant v=readVariant(in,set);
while(v!=null){
// Read values
if(set.contains(v.getOID())){
byte[] deleteData=new byte[v.getSize()];
for(int i=0;i<deleteData.length;i++){
deleteData[i]=DELETE;
}
page.addDiff(offset,deleteData);
}
offset+=v.getSize();
v=readVariant(in,set);
}
}
/**
 * Factory: wraps a boxed Java value in the Variant subtype matching its
 * runtime type.
 *
 * @param oid object id to tag the value with
 * @param obj boxed value (Byte, Short, Integer, Long, Float, Double, Boolean or String)
 * @return the wrapping Variant, or null when the runtime type is unsupported
 */
public static Variant createVariant(int oid,Object obj){
    // The checked classes are disjoint, so at most one branch can match and
    // the order of the tests is irrelevant.
    if(obj instanceof java.lang.Byte)return new Variant.Byte(oid,(java.lang.Byte)obj);
    if(obj instanceof java.lang.Short)return new Variant.Short(oid,(java.lang.Short)obj);
    if(obj instanceof java.lang.Integer)return new Variant.Integer(oid,(java.lang.Integer)obj);
    if(obj instanceof java.lang.Long)return new Variant.Long(oid,(java.lang.Long)obj);
    if(obj instanceof java.lang.Float)return new Variant.Float(oid,(java.lang.Float)obj);
    if(obj instanceof java.lang.Double)return new Variant.Double(oid,(java.lang.Double)obj);
    if(obj instanceof java.lang.Boolean)return new Variant.Boolean(oid,(java.lang.Boolean)obj);
    if(obj instanceof java.lang.String)return new Variant.String(oid,(java.lang.String)obj);
    return null;
}
/**
 * Reads the next variant from the stream.
 * Returns null at the STOP marker (end of data) or for an unrecognized type
 * tag. A DELETE tag yields a one-byte Skip so callers can advance over deleted
 * filler bytes. When the record's oid is not in the requested set, its payload
 * is skipped and a Skip placeholder carrying the record's size is returned.
 *
 * @param in stream positioned at a type tag
 * @param set oids the caller is interested in
 * @return the decoded variant, a Skip placeholder, or null at end of data
 * @throws IOException on underlying read failure
 */
public static Variant readVariant(DataInput in,ObjectSet set) throws IOException{
byte type=in.readByte();
if(type==DELETE){
// TODO: check that this works and think about an accumulative fix
return new Variant.Skip(1);
}
if(type==STOP){
return null;
}
int oid=in.readInt();
if(set.contains(oid)){
switch(type){
case BYTE:return Variant.Byte.readValue(oid,in);
case SHORT:return Variant.Short.readValue(oid,in);
case INTEGER:return Variant.Integer.readValue(oid,in);
case LONG:return Variant.Long.readValue(oid,in);
case FLOAT:return Variant.Float.readValue(oid,in);
case DOUBLE:return Variant.Double.readValue(oid,in);
case STRING:return Variant.String.readValue(oid,in);
case BOOLEAN:return Variant.Boolean.readValue(oid,in);
}
}else{
switch(type){
case BYTE:return Variant.Byte.skipValue(in);
case SHORT:return Variant.Short.skipValue(in);
case INTEGER:return Variant.Integer.skipValue(in);
case LONG:return Variant.Long.skipValue(in);
case FLOAT:return Variant.Float.skipValue(in);
case DOUBLE:return Variant.Double.skipValue(in);
case STRING:return Variant.String.skipValue(in);
case BOOLEAN:return Variant.Boolean.skipValue(in);
}
}
return null;
}
/**
 * Serializes this variant to a standalone byte array using its own
 * writeVariant encoding.
 *
 * @return the wire-format bytes for this variant
 * @throws IOException propagated from the underlying stream (not expected
 *         for an in-memory buffer)
 */
public byte[] toByteArray() throws IOException{
    ByteArrayOutputStream buffer=new ByteArrayOutputStream();
    DataOutputStream stream=new DataOutputStream(buffer);
    writeVariant(stream);
    stream.flush();
    stream.close();
    return buffer.toByteArray();
}
/**
 * Placeholder variant representing bytes that were passed over while reading
 * (a deleted record or one whose oid was filtered out). It carries only the
 * number of on-disk bytes it stands in for; it has no oid, writes nothing,
 * and compares as equal to everything.
 */
public static class Skip extends Variant{
// Number of on-disk bytes this placeholder covers.
private final int size;
public Skip(int size){
this.size=size;
}
public int getSize(){
return size;
}
public int getOID(){
// Skips carry no object id.
return -1;
}
public int getType(){
return SKIP;
}
public Object getObjectValue(){
return -1;
}
public int getValue(){
return -1;
}
public void writeVariant(DataOutput out) throws IOException{
// Intentionally empty: a skip has no on-disk representation of its own.
}
public byte[] toByteArray() throws IOException{
return new byte[0];
}
public static Variant readValue(int oid,DataInput in) throws IOException{
// return Variant.skipVariant;
return null;
}
public static Variant skipValue(DataInput in) throws IOException{
// return skipVariant;
return null;
}
public int compareTo(Variant v){
return 0;
}
}
/**
 * Variant encoding of a 32-bit int.
 * Wire format: 1 type byte (INTEGER), 4-byte oid, 4-byte value — 9 bytes total,
 * matching getSize().
 */
public static class Integer extends Variant{
private final int oid;
private final int value;
public Integer(int oid,int value){
this.oid=oid;
this.value=value;
}
public int getSize(){
return 1+4+4;
}
public int getOID(){
return oid;
}
public int getType(){
return INTEGER;
}
public Object getObjectValue(){
return value;
}
public int getValue(){
return value;
}
public void writeVariant(DataOutput out) throws IOException{
out.writeByte(INTEGER);
out.writeInt(oid);
out.writeInt(value);
}
public static Variant skipValue(DataInput in) throws IOException{
in.skipBytes(4);
return new Variant.Skip(1+4+4);
}
public static Variant.Integer readValue(int oid,DataInput in) throws IOException{
return new Variant.Integer(oid,in.readInt());
}
// NOTE(review): the sign convention is reversed relative to the Comparable
// contract — -1 is returned when v's value is SMALLER than this one. Every
// Variant subtype shares this convention, so it is preserved here as-is;
// confirm intent before changing.
public int compareTo(Variant v){
switch(v.getType()){
case BYTE:
int bval=((Variant.Byte)v).getValue();
if(bval<value)return -1;
if(bval==value)return 0;
return 1;
case SHORT:
int shval=((Variant.Short)v).getValue();
if(shval<value)return -1;
if(shval==value)return 0;
return 1;
case INTEGER:
int ival=((Variant.Integer)v).getValue();
if(ival<value)return -1;
if(ival==value)return 0;
return 1;
case LONG:
long lval=((Variant.Long)v).getValue();
if(lval<value)return -1;
if(lval==value)return 0;
return 1;
case FLOAT:
float fval=((Variant.Float)v).getValue();
if(fval<value)return -1;
if(fval==value)return 0;
return 1;
case DOUBLE:
double dval=((Variant.Double)v).getValue();
if(dval<value)return -1;
if(dval==value)return 0;
return 1;
case BOOLEAN:
return -1;
case STRING:
// Numeric-vs-string comparison falls back to comparing decimal string forms.
java.lang.String sval=((Variant.String)v).getValue();
java.lang.String str=java.lang.String.valueOf(value);
return str.compareTo(sval);
}
return 0;
}
}
/**
 * Variant encoding of a boolean.
 * Wire format: 1 type byte (BOOLEAN), 4-byte oid, 1-byte value — 6 bytes total,
 * matching getSize().
 */
public static class Boolean extends Variant{
    private final int id;
    private final boolean flag;
    public Boolean(int oid,boolean value){
        this.id=oid;
        this.flag=value;
    }
    public int getSize(){
        return 1+4+1;
    }
    public int getOID(){
        return id;
    }
    public int getType(){
        return BOOLEAN;
    }
    public Object getObjectValue(){
        return flag;
    }
    public boolean getValue(){
        return flag;
    }
    public void writeVariant(DataOutput out) throws IOException{
        out.writeByte(BOOLEAN);
        out.writeInt(id);
        out.writeBoolean(flag);
    }
    public static Variant skipValue(DataInput in) throws IOException{
        in.skipBytes(1);
        return new Variant.Skip(1+4+1);
    }
    public static Variant.Boolean readValue(int oid,DataInput in) throws IOException{
        return new Variant.Boolean(oid,in.readBoolean());
    }
    /**
     * Booleans are unordered with respect to every variant type: the original
     * implementation's switch returned 0 for every type tag and for the
     * fall-through, so it collapses to a single return.
     */
    public int compareTo(Variant v){
        return 0;
    }
}
/**
 * Variant encoding of a 32-bit float.
 * Wire format: 1 type byte (FLOAT), 4-byte oid, 4-byte value — 9 bytes total,
 * matching getSize().
 */
public static class Float extends Variant{
private final int oid;
private final float value;
public Float(int oid,float value){
this.oid=oid;
this.value=value;
}
public int getOID(){
return oid;
}
public int getSize(){
return 1+4+4;
}
public int getType(){
return FLOAT;
}
public Object getObjectValue(){
return value;
}
public float getValue(){
return value;
}
public static Variant skipValue(DataInput in) throws IOException{
in.skipBytes(4);
return new Variant.Skip(1+4+4);
}
public void writeVariant(DataOutput out) throws IOException{
out.writeByte(FLOAT);
out.writeInt(oid);
out.writeFloat(value);
}
public static Variant.Float readValue(int oid,DataInput in) throws IOException{
return new Variant.Float(oid,in.readFloat());
}
// NOTE(review): the sign convention is reversed relative to the Comparable
// contract — -1 is returned when v's value is SMALLER than this one. Every
// Variant subtype shares this convention, so it is preserved here as-is;
// confirm intent before changing. Numeric branches compare after implicit
// promotion to float/double.
public int compareTo(Variant v){
switch(v.getType()){
case BYTE:
int bval=((Variant.Byte)v).getValue();
if(bval<value)return -1;
if(bval==value)return 0;
return 1;
case SHORT:
int shval=((Variant.Short)v).getValue();
if(shval<value)return -1;
if(shval==value)return 0;
return 1;
case INTEGER:
int ival=((Variant.Integer)v).getValue();
if(ival<value)return -1;
if(ival==value)return 0;
return 1;
case LONG:
long lval=((Variant.Long)v).getValue();
if(lval<value)return -1;
if(lval==value)return 0;
return 1;
case FLOAT:
float fval=((Variant.Float)v).getValue();
if(fval<value)return -1;
if(fval==value)return 0;
return 1;
case DOUBLE:
double dval=((Variant.Double)v).getValue();
if(dval<value)return -1;
if(dval==value)return 0;
return 1;
case BOOLEAN:
return -1;
case STRING:
// Numeric-vs-string comparison falls back to comparing decimal string forms.
java.lang.String sval=((Variant.String)v).getValue();
java.lang.String str=java.lang.String.valueOf(value);
return str.compareTo(sval);
}
return 0;
}
}
/**
 * Variant encoding of a 64-bit double.
 * Wire format: 1 type byte (DOUBLE), 4-byte oid, 8-byte value — 13 bytes total,
 * matching getSize().
 */
public static class Double extends Variant{
private final int oid;
private final double value;
public Double(int oid,double value){
this.oid=oid;
this.value=value;
}
public int getOID(){
return oid;
}
public int getSize(){
return 1+4+8;
}
public int getType(){
return DOUBLE;
}
public Object getObjectValue(){
return value;
}
public double getValue(){
return value;
}
public static Variant skipValue(DataInput in) throws IOException{
in.skipBytes(8);
return new Variant.Skip(1+4+8);
}
public void writeVariant(DataOutput out) throws IOException{
out.writeByte(DOUBLE);
out.writeInt(oid);
out.writeDouble(value);
}
public static Variant.Double readValue(int oid,DataInput in) throws IOException{
return new Variant.Double(oid,in.readDouble());
}
// NOTE(review): the sign convention is reversed relative to the Comparable
// contract — -1 is returned when v's value is SMALLER than this one. Every
// Variant subtype shares this convention, so it is preserved here as-is;
// confirm intent before changing.
public int compareTo(Variant v){
switch(v.getType()){
case BYTE:
int bval=((Variant.Byte)v).getValue();
if(bval<value)return -1;
if(bval==value)return 0;
return 1;
case SHORT:
int shval=((Variant.Short)v).getValue();
if(shval<value)return -1;
if(shval==value)return 0;
return 1;
case INTEGER:
int ival=((Variant.Integer)v).getValue();
if(ival<value)return -1;
if(ival==value)return 0;
return 1;
case LONG:
long lval=((Variant.Long)v).getValue();
if(lval<value)return -1;
if(lval==value)return 0;
return 1;
case FLOAT:
float fval=((Variant.Float)v).getValue();
if(fval<value)return -1;
if(fval==value)return 0;
return 1;
case DOUBLE:
double dval=((Variant.Double)v).getValue();
if(dval<value)return -1;
if(dval==value)return 0;
return 1;
case BOOLEAN:
return -1;
case STRING:
// Numeric-vs-string comparison falls back to comparing decimal string forms.
java.lang.String sval=((Variant.String)v).getValue();
java.lang.String str=java.lang.String.valueOf(value);
return str.compareTo(sval);
}
return 0;
}
}
/**
 * Variant encoding of a 64-bit long.
 * Wire format: 1 type byte (LONG), 4-byte oid, 8-byte value — 13 bytes total,
 * matching getSize().
 */
public static class Long extends Variant{
private final int oid;
private final long value;
public Long(int oid,long value){
this.oid=oid;
this.value=value;
}
public int getOID(){
return oid;
}
public int getSize(){
return 1+4+8;
}
public int getType(){
return LONG;
}
public Object getObjectValue(){
return value;
}
public long getValue(){
return value;
}
public static Variant skipValue(DataInput in) throws IOException{
in.skipBytes(8);
return new Variant.Skip(1+4+8);
}
public void writeVariant(DataOutput out) throws IOException{
out.writeByte(LONG);
out.writeInt(oid);
out.writeLong(value);
}
public static Variant.Long readValue(int oid,DataInput in) throws IOException{
return new Variant.Long(oid,in.readLong());
}
// NOTE(review): the sign convention is reversed relative to the Comparable
// contract — -1 is returned when v's value is SMALLER than this one. Every
// Variant subtype shares this convention, so it is preserved here as-is;
// confirm intent before changing.
public int compareTo(Variant v){
switch(v.getType()){
case BYTE:
int bval=((Variant.Byte)v).getValue();
if(bval<value)return -1;
if(bval==value)return 0;
return 1;
case SHORT:
int shval=((Variant.Short)v).getValue();
if(shval<value)return -1;
if(shval==value)return 0;
return 1;
case INTEGER:
int ival=((Variant.Integer)v).getValue();
if(ival<value)return -1;
if(ival==value)return 0;
return 1;
case LONG:
long lval=((Variant.Long)v).getValue();
if(lval<value)return -1;
if(lval==value)return 0;
return 1;
case FLOAT:
float fval=((Variant.Float)v).getValue();
if(fval<value)return -1;
if(fval==value)return 0;
return 1;
case DOUBLE:
double dval=((Variant.Double)v).getValue();
if(dval<value)return -1;
if(dval==value)return 0;
return 1;
case BOOLEAN:
return -1;
case STRING:
// Numeric-vs-string comparison falls back to comparing decimal string forms.
java.lang.String sval=((Variant.String)v).getValue();
java.lang.String str=java.lang.String.valueOf(value);
return str.compareTo(sval);
}
return 0;
}
}
/**
 * Variant encoding of a 16-bit short.
 * Wire format: 1 type byte (SHORT), 4-byte oid, 2-byte value — 7 bytes total,
 * matching getSize(), readValue() and skipValue().
 */
public static class Short extends Variant{
    private final int oid;
    private final short value;
    public Short(int oid,short value){
        this.oid=oid;
        this.value=value;
    }
    public int getOID(){
        return oid;
    }
    public int getSize(){
        return 1+4+2;
    }
    public int getType(){
        return SHORT;
    }
    public Object getObjectValue(){
        return value;
    }
    public short getValue(){
        return value;
    }
    public static Variant skipValue(DataInput in) throws IOException{
        in.skipBytes(2);
        return new Variant.Skip(1+4+2);
    }
    public void writeVariant(DataOutput out) throws IOException{
        out.writeByte(SHORT);
        out.writeInt(oid);
        // BUG FIX: was out.writeLong(value), which wrote an 8-byte payload and
        // disagreed with getSize() (1+4+2), readValue() (readShort) and
        // skipValue() (2 bytes), corrupting every record that followed.
        out.writeShort(value);
    }
    public static Variant.Short readValue(int oid,DataInput in) throws IOException{
        return new Variant.Short(oid,in.readShort());
    }
    // NOTE(review): the sign convention is reversed relative to the Comparable
    // contract — -1 is returned when v's value is SMALLER than this one. Every
    // Variant subtype shares this convention, so it is preserved here as-is.
    public int compareTo(Variant v){
        switch(v.getType()){
        case BYTE:
            int bval=((Variant.Byte)v).getValue();
            if(bval<value)return -1;
            if(bval==value)return 0;
            return 1;
        case SHORT:
            int shval=((Variant.Short)v).getValue();
            if(shval<value)return -1;
            if(shval==value)return 0;
            return 1;
        case INTEGER:
            int ival=((Variant.Integer)v).getValue();
            if(ival<value)return -1;
            if(ival==value)return 0;
            return 1;
        case LONG:
            long lval=((Variant.Long)v).getValue();
            if(lval<value)return -1;
            if(lval==value)return 0;
            return 1;
        case FLOAT:
            float fval=((Variant.Float)v).getValue();
            if(fval<value)return -1;
            if(fval==value)return 0;
            return 1;
        case DOUBLE:
            double dval=((Variant.Double)v).getValue();
            if(dval<value)return -1;
            if(dval==value)return 0;
            return 1;
        case BOOLEAN:
            return -1;
        case STRING:
            // Numeric-vs-string comparison falls back to decimal string forms.
            java.lang.String sval=((Variant.String)v).getValue();
            java.lang.String str=java.lang.String.valueOf(value);
            return str.compareTo(sval);
        }
        return 0;
    }
}
/**
 * Variant encoding of a single signed byte.
 * Wire format: 1 type byte (BYTE), 4-byte oid, 1-byte value — 6 bytes total,
 * matching getSize(), readValue() and skipValue().
 */
public static class Byte extends Variant{
    private final int oid;
    private final byte value;
    public Byte(int oid,byte value){
        this.oid=oid;
        this.value=value;
    }
    public int getOID(){
        return oid;
    }
    public int getSize(){
        return 1+4+1;
    }
    public int getType(){
        return BYTE;
    }
    public Object getObjectValue(){
        return value;
    }
    public byte getValue(){
        return value;
    }
    public static Variant skipValue(DataInput in) throws IOException{
        in.skipBytes(1);
        return new Variant.Skip(1+4+1);
    }
    public void writeVariant(DataOutput out) throws IOException{
        out.writeByte(BYTE);
        out.writeInt(oid);
        // BUG FIX: was out.writeLong(value), which wrote an 8-byte payload and
        // disagreed with getSize() (1+4+1), readValue() (readByte) and
        // skipValue() (1 byte), corrupting every record that followed.
        out.writeByte(value);
    }
    public static Variant.Byte readValue(int oid,DataInput in) throws IOException{
        return new Variant.Byte(oid,in.readByte());
    }
    // NOTE(review): the sign convention is reversed relative to the Comparable
    // contract — -1 is returned when v's value is SMALLER than this one. Every
    // Variant subtype shares this convention, so it is preserved here as-is.
    public int compareTo(Variant v){
        switch(v.getType()){
        case BYTE:
            int bval=((Variant.Byte)v).getValue();
            if(bval<value)return -1;
            if(bval==value)return 0;
            return 1;
        case SHORT:
            int shval=((Variant.Short)v).getValue();
            if(shval<value)return -1;
            if(shval==value)return 0;
            return 1;
        case INTEGER:
            int ival=((Variant.Integer)v).getValue();
            if(ival<value)return -1;
            if(ival==value)return 0;
            return 1;
        case LONG:
            long lval=((Variant.Long)v).getValue();
            if(lval<value)return -1;
            if(lval==value)return 0;
            return 1;
        case FLOAT:
            float fval=((Variant.Float)v).getValue();
            if(fval<value)return -1;
            if(fval==value)return 0;
            return 1;
        case DOUBLE:
            double dval=((Variant.Double)v).getValue();
            if(dval<value)return -1;
            if(dval==value)return 0;
            return 1;
        case BOOLEAN:
            return -1;
        case STRING:
            // Numeric-vs-string comparison falls back to decimal string forms.
            java.lang.String sval=((Variant.String)v).getValue();
            java.lang.String str=java.lang.String.valueOf(value);
            return str.compareTo(sval);
        }
        return 0;
    }
}
/**
 * Variant encoding of a string.
 * Wire format: 1 type byte (STRING), 4-byte oid, 2-byte char count, then one
 * 2-byte char per character.
 * NOTE(review): the length is cast to short on write, so strings longer than
 * 32767 chars would silently corrupt the page — confirm whether inputs are
 * bounded upstream.
 */
public static class String extends Variant{
private final int oid;
private final java.lang.String value;
public String(int oid,java.lang.String value){
this.oid=oid;
this.value=value;
}
public int getOID(){
return oid;
}
public int getSize(){
return 1+4+2+2*value.length();
}
public int getType(){
return STRING;
}
public Object getObjectValue(){
return value;
}
public java.lang.String getValue(){
return value;
}
public static Variant skipValue(DataInput in) throws IOException{
short s=in.readShort();
in.skipBytes(s*2);
return new Variant.Skip(1+4+2+2*s);
}
public void writeVariant(DataOutput out) throws IOException{
out.writeByte(STRING);
out.writeInt(oid);
out.writeShort((short)value.length());
for(int i=0;i<value.length();i++){
out.writeChar(value.charAt(i));
}
}
public static Variant.String readValue(int oid,DataInput in) throws IOException{
short length=in.readShort();
char[] data=new char[length];
for(int i=0;i<length;i++){
data[i]=in.readChar();
}
return new Variant.String(oid,new java.lang.String(data));
}
// NOTE(review): cross-type comparisons convert the other value to its decimal
// string form and use String ordering; BOOLEAN is always -1. This mirrors the
// (reversed-sign) convention used by the numeric subtypes.
public int compareTo(Variant v){
switch(v.getType()){
case BYTE:
int bval=((Variant.Byte)v).getValue();
java.lang.String sval=java.lang.String.valueOf(bval);
return value.compareTo(sval);
case SHORT:
int shval=((Variant.Short)v).getValue();
sval=java.lang.String.valueOf(shval);
return value.compareTo(sval);
case INTEGER:
int ival=((Variant.Integer)v).getValue();
sval=java.lang.String.valueOf(ival);
return value.compareTo(sval);
case LONG:
long lval=((Variant.Long)v).getValue();
sval=java.lang.String.valueOf(lval);
return value.compareTo(sval);
case FLOAT:
float fval=((Variant.Float)v).getValue();
sval=java.lang.String.valueOf(fval);
return value.compareTo(sval);
case DOUBLE:
double dval=((Variant.Double)v).getValue();
sval=java.lang.String.valueOf(dval);
return value.compareTo(sval);
case BOOLEAN:
return -1;
case STRING:
sval=((Variant.String)v).getValue();
return value.compareTo(sval);
}
return 0;
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet;
import static org.apache.drill.exec.store.parquet.TestFileGenerator.populateFieldInfoMap;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import io.netty.buffer.DrillBuf;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import mockit.Injectable;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.util.FileUtils;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.proto.BitControl;
import org.apache.drill.exec.proto.UserBitShared.QueryType;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.rpc.UserClientConnection;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.CachedSingleFileSystem;
import org.apache.drill.exec.store.TestOutputMutator;
import org.apache.drill.exec.store.parquet.columnreaders.ParquetRecordReader;
import org.apache.drill.exec.util.CallBack;
import org.apache.drill.exec.util.Text;
import org.apache.drill.exec.vector.BigIntVector;
import org.apache.drill.exec.vector.NullableBigIntVector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.bytes.BytesInput;
import org.apache.parquet.column.page.DataPageV1;
import org.apache.parquet.column.page.PageReadStore;
import org.apache.parquet.column.page.PageReader;
import org.apache.parquet.hadoop.CodecFactory;
import org.apache.parquet.hadoop.Footer;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.MessageType;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
/**
 * Integration tests for the Parquet record reader. The class is @Ignore'd,
 * presumably because most tests depend on pre-staged files under /tmp —
 * confirm before re-enabling.
 */
@Ignore
public class ParquetRecordReaderTest extends BaseTestQuery {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetRecordReaderTest.class);
static final boolean VERBOSE_DEBUG = false;
// Shape of the generated test file. NOTE(review): some helper methods take
// parameters with near-identical names (numberOfRowGroups/recordsPerRowGroup)
// that shadow these fields — see testFull and testParquetFullEngineRemote.
private static final int numberRowGroups = 1;
private static final int recordsPerRowGroup = 300;
private static int DEFAULT_BYTES_PER_PAGE = 1024 * 1024 * 1;
private static final String fileName = "/tmp/parquet_test_file_many_types";
/**
 * Generates the shared parquet test file once before the suite runs; skipped
 * when the file already exists on disk.
 */
@BeforeClass
public static void generateFile() throws Exception {
final File f = new File(fileName);
final ParquetTestProperties props =
new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, new HashMap<String, FieldInfo>());
populateFieldInfoMap(props);
if (!f.exists()) {
TestFileGenerator.generateParquetFile(fileName, props);
}
}
// Reads the generated file twice (read count is baked into the plan JSON).
@Test
public void testMultipleRowGroupsAndReads3() throws Exception {
final String planName = "/parquet/parquet_scan_screen.json";
testParquetFullEngineLocalPath(planName, fileName, 2, numberRowGroups, recordsPerRowGroup);
}
/**
 * Loads a plan resource and substitutes the given parquet file path into its
 * replacement marker.
 */
public String getPlanForFile(String pathFileName, String parquetFileName) throws IOException {
return Files.toString(FileUtils.getResourceAsFile(pathFileName), Charsets.UTF_8)
.replaceFirst("&REPLACED_IN_PARQUET_TEST&", parquetFileName);
}
/**
 * Reads the generated file several times through one scan by repeating the
 * quoted read entry ("file","file",...) inside the plan JSON.
 */
@Test
public void testMultipleRowGroupsAndReads2() throws Exception {
final StringBuilder readEntries = new StringBuilder();
// number of times to read the file
int i = 3;
for (int j = 0; j < i; j++) {
readEntries.append('"');
readEntries.append(fileName);
readEntries.append('"');
if (j < i - 1) {
readEntries.append(',');
}
}
final String planText = Files.toString(FileUtils.getResourceAsFile(
"/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8).replaceFirst(
"&REPLACED_IN_PARQUET_TEST&", readEntries.toString());
testParquetFullEngineLocalText(planText, fileName, i, numberRowGroups, recordsPerRowGroup, true);
}
// Regression test against a nullable-dictionary file; relies on a pre-staged
// file in /tmp and does not validate values (testValues=false).
@Test
public void testDictionaryError() throws Exception {
testFull(QueryType.SQL, "select L_RECEIPTDATE from dfs.`/tmp/lineitem_null_dict.parquet`", "", 1, 1, 100000, false);
}
/**
 * Sums a nullable column and checks the exact aggregate against a pre-staged
 * file whose expected sum is encoded in its name.
 */
@Test
public void testNullableAgg() throws Exception {
final List<QueryDataBatch> result = testSqlWithResults(
"select sum(a) as total_sum from dfs.`/tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
final QueryDataBatch b = result.get(0);
loader.load(b.getHeader().getDef(), b.getData());
final VectorWrapper vw = loader.getValueAccessorById(
NullableBigIntVector.class,
loader.getValueVectorId(SchemaPath.getCompoundPath("total_sum")).getFieldIds()
);
assertEquals(4999950000l, vw.getValueVector().getAccessor().getObject(0));
b.release();
loader.clear();
}
/**
 * Counts rows matching a filter on a nullable column and checks the exact
 * count from the returned vector.
 */
@Test
public void testNullableFilter() throws Exception {
final List<QueryDataBatch> result = testSqlWithResults(
"select count(wr_return_quantity) as row_count from dfs.`/tmp/web_returns` where wr_return_quantity = 1");
assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
final QueryDataBatch b = result.get(0);
loader.load(b.getHeader().getDef(), b.getData());
final VectorWrapper vw = loader.getValueAccessorById(
BigIntVector.class,
loader.getValueVectorId(SchemaPath.getCompoundPath("row_count")).getFieldIds()
);
assertEquals(3573l, vw.getValueVector().getAccessor().getObject(0));
b.release();
loader.clear();
}
/**
 * Scans a fixed-width binary column file.
 * NOTE(review): the plan reads fixed_binary.parquet but the validation call is
 * passed the generated `fileName` — confirm this mismatch is intended.
 */
@Test
public void testFixedBinary() throws Exception {
final String readEntries = "\"/tmp/drilltest/fixed_binary.parquet\"";
final String planText = Files.toString(FileUtils.getResourceAsFile(
"/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8)
.replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
testParquetFullEngineLocalText(planText, fileName, 1, 1, 1000000, false);
}
// The remaining tests in this group are smoke/regression scans over
// pre-staged /tmp files; none validate values (testValues=false).
@Test
public void testNonNullableDictionaries() throws Exception {
testFull(QueryType.SQL,
"select * from dfs.`/tmp/drilltest/non_nullable_dictionary.parquet`", "", 1, 1, 30000000, false);
}
@Test
public void testNullableVarCharMemory() throws Exception {
testFull(QueryType.SQL,
"select s_comment,s_suppkey from dfs.`/tmp/sf100_supplier.parquet`", "", 1, 1, 1000, false);
}
@Test
public void testReadVoter() throws Exception {
testFull(QueryType.SQL, "select * from dfs.`/tmp/voter.parquet`", "", 1, 1, 1000, false);
}
@Test
public void testDrill_1314() throws Exception {
testFull(QueryType.SQL, "select l_partkey " +
"from dfs.`/tmp/drill_1314.parquet`", "", 1, 1, 10000, false);
}
@Test
public void testDrill_1314_all_columns() throws Exception {
testFull(QueryType.SQL, "select * from dfs.`/tmp/drill_1314.parquet`", "", 1, 1, 10000, false);
}
// Regression for DRILL-419: dictionary-encoded column read from a
// snappy/impala-written file.
@Test
public void testDictionaryError_419() throws Exception {
testFull(QueryType.SQL,
"select c_address from dfs.`/tmp/customer_snappyimpala_drill_419.parquet`", "", 1, 1, 150000, false);
}
// Selecting columns that do not exist in the file must not fail the scan.
@Test
public void testNonExistentColumn() throws Exception {
testFull(QueryType.SQL,
"select non_existent_column from cp.`tpch/nation.parquet`", "", 1, 1, 150000, false);
}
@Test
public void testNonExistentColumnLargeFile() throws Exception {
testFull(QueryType.SQL,
"select non_existent_column, non_existent_col_2 from dfs.`/tmp/customer.dict.parquet`", "", 1, 1, 150000, false);
}
@Test
public void testNonExistentColumnsSomePresentColumnsLargeFile() throws Exception {
testFull(QueryType.SQL,
"select cust_key, address, non_existent_column, non_existent_col_2 from dfs.`/tmp/customer.dict.parquet`",
"", 1, 1, 150000, false);
}
@Ignore // ignored for now for performance
@Test
public void testTPCHPerformace_SF1() throws Exception {
testFull(QueryType.SQL,
"select * from dfs.`/tmp/orders_part-m-00001.parquet`", "", 1, 1, 150000, false);
}
// Runs the physical plan locally through the distributed code path.
@Test
public void testLocalDistributed() throws Exception {
final String planName = "/parquet/parquet_scan_union_screen_physical.json";
testParquetFullEngineLocalTextDistributed(planName, fileName, 1, numberRowGroups, recordsPerRowGroup);
}
// Requires a remote drillbit; disabled by default.
@Test
@Ignore
public void testRemoteDistributed() throws Exception {
final String planName = "/parquet/parquet_scan_union_screen_physical.json";
testParquetFullEngineRemote(planName, fileName, 1, numberRowGroups, recordsPerRowGroup);
}
/**
 * Convenience overload: loads the logical plan from a classpath resource and
 * delegates to the text variant with value validation enabled.
 */
public void testParquetFullEngineLocalPath(String planFileName, String filename,
int numberOfTimesRead /* specified in json plan */,
int numberOfRowGroups, int recordsPerRowGroup) throws Exception {
testParquetFullEngineLocalText(Files.toString(FileUtils.getResourceAsFile(planFileName), Charsets.UTF_8), filename,
numberOfTimesRead, numberOfRowGroups, recordsPerRowGroup, true);
}
//specific tests should call this method, but it is not marked as a test itself intentionally
public void testParquetFullEngineLocalText(String planText, String filename,
int numberOfTimesRead /* specified in json plan */,
int numberOfRowGroups, int recordsPerRowGroup, boolean testValues) throws Exception {
testFull(QueryType.LOGICAL, planText, filename, numberOfTimesRead, numberOfRowGroups, recordsPerRowGroup, testValues);
}
/**
 * Runs the given plan/SQL through the engine and validates the returned
 * batches with a ParquetResultListener built from the expected file shape.
 *
 * @param type plan type (SQL, LOGICAL or PHYSICAL)
 * @param planText the plan or query text to execute
 * @param filename parquet file under test (informational)
 * @param numberOfTimesRead how many times the plan reads the file
 * @param numberOfRowGroups expected row-group count in the file
 * @param recordsPerRowGroup expected records per row group
 * @param testValues whether the listener should validate cell values
 */
private void testFull(QueryType type, String planText, String filename,
    int numberOfTimesRead /* specified in json plan */,
    int numberOfRowGroups, int recordsPerRowGroup, boolean testValues) throws Exception {
  // final RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  // BUG FIX: previously referenced the static field numberRowGroups here,
  // silently ignoring the numberOfRowGroups parameter (recordsPerRowGroup
  // already shadowed its field and was honored).
  final ParquetTestProperties props =
      new ParquetTestProperties(numberOfRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, fields);
  TestFileGenerator.populateFieldInfoMap(props);
  final ParquetResultListener resultListener =
      new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
  final Stopwatch watch = Stopwatch.createStarted();
  testWithListener(type, planText, resultListener);
  // Block until all result batches have been received and validated.
  resultListener.getResults();
  // batchLoader.clear();
  System.out.println(String.format("Took %d ms to run query", watch.elapsed(TimeUnit.MILLISECONDS)));
}
/**
 * Submits a physical plan loaded from a classpath resource; use this variant
 * to exercise the distributed execution path locally.
 */
//use this method to submit physical plan
public void testParquetFullEngineLocalTextDistributed(String planName, String filename,
int numberOfTimesRead /* specified in json plan */,
int numberOfRowGroups, int recordsPerRowGroup) throws Exception {
String planText = Files.toString(FileUtils.getResourceAsFile(planName), Charsets.UTF_8);
testFull(QueryType.PHYSICAL, planText, filename, numberOfTimesRead, numberOfRowGroups, recordsPerRowGroup, true);
}
/**
 * Left-pads the value with spaces until it is at least {@code length}
 * characters long.
 */
public String pad(String value, int length) {
  return pad(value, length, " ");
}
/**
 * Left-pads the value by prefixing copies of {@code with} until the total
 * length reaches at least {@code length}. When {@code with} is longer than one
 * character the result may overshoot the requested length, matching the
 * original insert-at-front behavior.
 */
public String pad(String value, int length, String with) {
  final StringBuilder prefix = new StringBuilder();
  while (prefix.length() + value.length() < length) {
    prefix.append(with);
  }
  return prefix.append(value).toString();
}
/**
 * Runs a PHYSICAL plan loaded from the classpath and validates all values.
 *
 * @param numberOfRowGroups expected row groups per file. NOTE(review): the old
 *        code ignored this parameter and used a class-level "numberRowGroups"
 *        field instead; it now honors the caller's value — confirm no caller
 *        relied on the field.
 */
public void testParquetFullEngineRemote(String plan, String filename,
    int numberOfTimesRead /* specified in json plan */,
    int numberOfRowGroups, int recordsPerRowGroup) throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props =
      new ParquetTestProperties(numberOfRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, fields);
  TestFileGenerator.populateFieldInfoMap(props);
  final ParquetResultListener resultListener =
      new ParquetResultListener(getAllocator(), props, numberOfTimesRead, true);
  testWithListener(QueryType.PHYSICAL, Files.toString(FileUtils.getResourceAsFile(plan), Charsets.UTF_8), resultListener);
  // Blocks until all expected batches have arrived and been validated.
  resultListener.getResults();
}
/**
 * Minimal {@link OutputMutator} stub for driving the Parquet record reader
 * directly in a test without a full fragment setup; most operations are no-ops.
 */
private static class MockOutputMutator implements OutputMutator {
// Fields the reader reported as removed from the schema.
private final List<MaterializedField> removedFields = Lists.newArrayList();
// Vectors the reader added; exposed for post-run inspection.
private final List<ValueVector> addFields = Lists.newArrayList();
List<MaterializedField> getRemovedFields() {
return removedFields;
}
List<ValueVector> getAddFields() {
return addFields;
}
@Override
public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz) throws SchemaChangeException {
// No-op stub: added fields are not tracked through this path.
return null;
}
@Override
public void allocate(int recordCount) {
}
@Override
public boolean isNewSchema() {
return false;
}
@Override
public DrillBuf getManagedBuffer() {
// NOTE(review): "allocator" must be a static member of the enclosing test
// class (this nested class is static) — confirm before reusing elsewhere.
return allocator.buffer(255);
}
@Override
public CallBack getCallBack() {
return null;
}
}
/**
 * Sanity-checks footers read from a generated dataset: exactly three "part"
 * files, each with two row groups and the expected key/value metadata.
 */
private void validateFooters(final List<Footer> metadata) {
  logger.debug(metadata.toString());
  assertEquals(3, metadata.size());
  for (final Footer footer : metadata) {
    final File partFile = new File(footer.getFile().toUri());
    assertTrue(partFile.getName(), partFile.getName().startsWith("part"));
    assertTrue(partFile.getPath(), partFile.exists());
    final ParquetMetadata footerMetadata = footer.getParquetMetadata();
    assertEquals(2, footerMetadata.getBlocks().size());
    final Map<String, String> keyValues = footerMetadata.getFileMetaData().getKeyValueMetaData();
    assertEquals("bar", keyValues.get("foo"));
    assertEquals(footer.getFile().getName(), keyValues.get(footer.getFile().getName()));
  }
}
/**
 * Asserts that the first page of the given column holds exactly the expected
 * value count and raw bytes.
 */
private void validateContains(MessageType schema, PageReadStore pages, String[] path, int values, BytesInput bytes)
    throws IOException {
  final PageReader reader = pages.getPageReader(schema.getColumnDescription(path));
  final DataPageV1 firstPage = (DataPageV1) reader.readPage();
  assertEquals(values, firstPage.getValueCount());
  assertArrayEquals(bytes.toByteArray(), firstPage.getBytes().toByteArray());
}
// Verifies that a file containing two row groups of 300 records each is read
// back correctly end-to-end.
@Test
public void testMultipleRowGroups() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(2, 300, DEFAULT_BYTES_PER_PAGE, fields);
  populateFieldInfoMap(props);
  testParquetFullEngineEventBased(true, "/parquet/parquet_scan_screen.json", "/tmp/test.parquet", 1, props);
}
// TODO - Test currently marked ignore to prevent breaking of the build process; it requires a
// binary file that was generated using pig. A good place to keep files like this is still needed;
// for now it is uploaded to the JIRA as an attachment.
@Test
public void testNullableColumns() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 1500000, DEFAULT_BYTES_PER_PAGE, fields);
  // Expected values include nulls to exercise the nullable-column read path.
  final Object[] expectedBooleans = {true, null, null};
  props.fields.put("a", new FieldInfo("boolean", "a", 1, expectedBooleans, TypeProtos.MinorType.BIT, props));
  testParquetFullEngineEventBased(false, "/parquet/parquet_nullable.json", "/tmp/nullable_test.parquet", 1, props);
}
/**
 * Tests the reading of nullable var length columns; runs twice, once on a file
 * that has a converted type of UTF-8 to make sure it can be read.
 * (Javadoc moved before the annotation per convention; an unused local
 * "val3" was removed.)
 */
@Test
public void testNullableColumnsVarLen() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 300000, DEFAULT_BYTES_PER_PAGE, fields);
  final byte[] val = {'b'};
  final byte[] val2 = {'b', '2'};
  final byte[] val4 = {'l', 'o', 'n', 'g', 'e', 'r', ' ', 's', 't', 'r', 'i', 'n', 'g'};
  final Object[] byteArrayVals = {val, val2, val4};
  props.fields.put("a", new FieldInfo("boolean", "a", 1, byteArrayVals, TypeProtos.MinorType.BIT, props));
  testParquetFullEngineEventBased(false, "/parquet/parquet_nullable_varlen.json", "/tmp/nullable_varlen.parquet", 1, props);
  // Second pass: pass strings (UTF-8 converted type) instead of byte arrays.
  final HashMap<String, FieldInfo> fields2 = new HashMap<>();
  final Object[] textVals = {new org.apache.drill.exec.util.Text("b"), new org.apache.drill.exec.util.Text("b2"),
      new org.apache.drill.exec.util.Text("b3")};
  final ParquetTestProperties props2 = new ParquetTestProperties(1, 30000, DEFAULT_BYTES_PER_PAGE, fields2);
  props2.fields.put("a", new FieldInfo("boolean", "a", 1, textVals, TypeProtos.MinorType.BIT, props2));
  testParquetFullEngineEventBased(false, "/parquet/parquet_scan_screen_read_entry_replace.json",
      "\"/tmp/varLen.parquet/a\"", "unused", 1, props2);
}
// Verifies var-length column reads when actual null values are interleaved
// with empty and non-empty strings.
@Test
public void testFileWithNulls() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 3000, DEFAULT_BYTES_PER_PAGE, fields);
  // Actually include null values in the expected data.
  final Object[] valuesWithNull = {new Text(""), new Text("longer string"), null};
  props.fields.put("a", new FieldInfo("boolean", "a", 1, valuesWithNull, TypeProtos.MinorType.BIT, props));
  testParquetFullEngineEventBased(false, "/parquet/parquet_scan_screen_read_entry_replace.json",
      "\"/tmp/nullable_with_nulls.parquet\"", "unused", 1, props);
}
/**
 * Reads two pre-generated dictionary-encoded files. Field metadata is
 * intentionally null: values are not validated here, only that the reads
 * complete. (An unused local "Object[] boolVals = null" was removed.)
 */
@Test
public void testDictionaryEncoding() throws Exception {
  HashMap<String, FieldInfo> fields = new HashMap<>();
  ParquetTestProperties props = new ParquetTestProperties(1, 25, DEFAULT_BYTES_PER_PAGE, fields);
  props.fields.put("n_name", null);
  props.fields.put("n_nationkey", null);
  props.fields.put("n_regionkey", null);
  props.fields.put("n_comment", null);
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json",
      "\"/tmp/nation_dictionary_fail.parquet\"", "unused", 1, props, QueryType.LOGICAL);
  // Second file: employee data with a wider set of columns.
  fields = new HashMap<>();
  props = new ParquetTestProperties(1, 5, DEFAULT_BYTES_PER_PAGE, fields);
  props.fields.put("employee_id", null);
  props.fields.put("name", null);
  props.fields.put("role", null);
  props.fields.put("phone", null);
  props.fields.put("password_hash", null);
  props.fields.put("gender_male", null);
  props.fields.put("height", null);
  props.fields.put("hair_thickness", null);
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json",
      "\"/tmp/employees_5_16_14.parquet\"", "unused", 1, props, QueryType.LOGICAL);
}
/**
 * Reads the same multi-row-group file several times within one plan. The
 * comma-separated read-entry list is now built with a StringBuilder instead of
 * repeated String concatenation in a loop.
 */
@Test
public void testMultipleRowGroupsAndReads() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(4, 3000, DEFAULT_BYTES_PER_PAGE, fields);
  populateFieldInfoMap(props);
  // Number of times to read the file.
  final int numberOfReads = 3;
  final StringBuilder readEntries = new StringBuilder();
  for (int i = 0; i < numberOfReads; i++) {
    if (i > 0) {
      readEntries.append(',');
    }
    readEntries.append("\"/tmp/test.parquet\"");
  }
  testParquetFullEngineEventBased(true, "/parquet/parquet_scan_screen_read_entry_replace.json",
      readEntries.toString(), "/tmp/test.parquet", numberOfReads, props);
}
// Regression for DRILL-901:
// select cast( L_COMMENT as varchar) from dfs_test.`/tmp/drilltest/employee_parquet`
@Test
public void testReadError_Drill_901() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 60175, DEFAULT_BYTES_PER_PAGE, fields);
  testParquetFullEngineEventBased(false, false, "/parquet/par_writer_test.json", null,
      "unused, no file is generated", 1, props, QueryType.PHYSICAL);
}
// Regression for DRILL-839:
// select cast( L_COMMENT as varchar) from dfs.`/tmp/drilltest/employee_parquet`
@Test
public void testReadError_Drill_839() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 150000, DEFAULT_BYTES_PER_PAGE, fields);
  final String readEntries = "\"/tmp/customer_nonull.parquet\"";
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json", readEntries,
      "unused, no file is generated", 1, props, QueryType.LOGICAL);
}
// Regression for DRILL-418: reads a pre-generated plain customer file with
// field metadata populated specifically for that dataset.
@Test
public void testReadBug_Drill_418() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 150000, DEFAULT_BYTES_PER_PAGE, fields);
  TestFileGenerator.populateDrill_418_fields(props);
  final String readEntries = "\"/tmp/customer.plain.parquet\"";
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json", readEntries,
      "unused, no file is generated", 1, props, QueryType.LOGICAL);
}
// Requires binary files generated by pig from TPCH data; the assertion on
// incoming data also has to be disabled when running this.
@Test
public void testMultipleRowGroupsAndReadsPigError() throws Exception {
  HashMap<String, FieldInfo> fields = new HashMap<>();
  ParquetTestProperties props = new ParquetTestProperties(1, 1500000, DEFAULT_BYTES_PER_PAGE, fields);
  TestFileGenerator.populatePigTPCHCustomerFields(props);
  String readEntries = "\"/tmp/tpc-h/customer\"";
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json", readEntries,
      "unused, no file is generated", 1, props, QueryType.LOGICAL);
  // Second dataset: TPC-H supplier.
  fields = new HashMap<>();
  props = new ParquetTestProperties(1, 100000, DEFAULT_BYTES_PER_PAGE, fields);
  TestFileGenerator.populatePigTPCHSupplierFields(props);
  readEntries = "\"/tmp/tpc-h/supplier\"";
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json", readEntries,
      "unused, no file is generated", 1, props, QueryType.LOGICAL);
}
// Regression for DRILL-958: full SQL scan of a local store_sales dataset;
// values are not validated (testValues=false), only that the read completes.
@Test
public void test958_sql() throws Exception {
  testFull(QueryType.SQL, "select * from dfs.`/tmp/store_sales`", "", 1, 1, 30000000, false);
}
// Regression for DRILL-958 via the plan-replacement path, using the pig TPCH
// customer field metadata against a local store_sales dataset.
@Test
public void drill_958bugTest() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 2880404, DEFAULT_BYTES_PER_PAGE, fields);
  TestFileGenerator.populatePigTPCHCustomerFields(props);
  final String readEntries = "\"/tmp/store_sales\"";
  testParquetFullEngineEventBased(false, false, "/parquet/parquet_scan_screen_read_entry_replace.json", readEntries,
      "unused, no file is generated", 1, props, QueryType.LOGICAL);
}
// Event-based variant of the two-row-group read test (see testMultipleRowGroups).
@Test
public void testMultipleRowGroupsEvent() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(2, 300, DEFAULT_BYTES_PER_PAGE, fields);
  populateFieldInfoMap(props);
  testParquetFullEngineEventBased(true, "/parquet/parquet_scan_screen.json", "/tmp/test.parquet", 1, props);
}
/**
 * Tests the attribute in a scan node that limits the columns read by a scan.
 *
 * Selecting all columns is covered by every other test that omits the attribute.
 * @throws Exception
 */
@Test
public void testSelectColumnRead() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  ParquetTestProperties props = new ParquetTestProperties(4, 3000, DEFAULT_BYTES_PER_PAGE, fields);
  // Generate metadata for the full set of test columns; all are written to the file.
  populateFieldInfoMap(props);
  TestFileGenerator.generateParquetFile("/tmp/test.parquet", props);
  fields.clear();
  // Describe the dataset expected out of the scan: only the columns requested in
  // parquet_selective_column_read.json, the plan used by the query below.
  props = new ParquetTestProperties(4, 3000, DEFAULT_BYTES_PER_PAGE, fields);
  props.fields.put("integer", new FieldInfo("int32", "integer", 32, TestFileGenerator.intVals, TypeProtos.MinorType.INT, props));
  props.fields.put("bigInt", new FieldInfo("int64", "bigInt", 64, TestFileGenerator.longVals, TypeProtos.MinorType.BIGINT, props));
  props.fields.put("bin", new FieldInfo("binary", "bin", -1, TestFileGenerator.binVals, TypeProtos.MinorType.VARBINARY, props));
  props.fields.put("bin2", new FieldInfo("binary", "bin2", -1, TestFileGenerator.bin2Vals, TypeProtos.MinorType.VARBINARY, props));
  testParquetFullEngineEventBased(true, false, "/parquet/parquet_selective_column_read.json", null, "/tmp/test.parquet", 1, props, QueryType.PHYSICAL);
}
// Micro-benchmark for the Parquet record reader: scans a large (20M-row) local
// file 25 times and prints per-iteration timings. Kept @Ignore'd because it
// needs an injected Drillbit context and a pre-generated multi-GB local file.
@Test
@Ignore
public void testPerformance(@Injectable final DrillbitContext bitContext,
@Injectable UserClientConnection connection) throws Exception {
final DrillConfig c = DrillConfig.create();
final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
final FragmentContext context = new FragmentContext(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);
// new NonStrictExpectations() {
// {
// context.getAllocator(); result = BufferAllocator.getAllocator(DrillConfig.create());
// }
// };
final String fileName = "/tmp/parquet_test_performance.parquet";
final HashMap<String, FieldInfo> fields = new HashMap<>();
final ParquetTestProperties props = new ParquetTestProperties(1, 20 * 1000 * 1000, DEFAULT_BYTES_PER_PAGE, fields);
populateFieldInfoMap(props);
// File generation is disabled; the benchmark assumes the file already exists.
//generateParquetFile(fileName, props);
final Configuration dfsConfig = new Configuration();
final List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
final Footer f = footers.iterator().next();
// Restrict the scan to a fixed set of columns under the _MAP prefix.
final List<SchemaPath> columns = Lists.newArrayList();
columns.add(new SchemaPath("_MAP.integer", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.bigInt", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.f", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.d", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.b", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.bin", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.bin2", ExpressionPosition.UNKNOWN));
int totalRowCount = 0;
// Cache the file in memory so repeated iterations measure the reader, not disk I/O.
final FileSystem fs = new CachedSingleFileSystem(fileName);
final BufferAllocator allocator = RootAllocatorFactory.newRoot(c);
for(int i = 0; i < 25; i++) {
final ParquetRecordReader rr = new ParquetRecordReader(context, fileName, 0, fs,
CodecFactory.createDirectCodecFactory(dfsConfig, new ParquetDirectByteBufferAllocator(allocator), 0),
f.getParquetMetadata(), columns, ParquetReaderUtility.DateCorruptionStatus.META_SHOWS_CORRUPTION);
final TestOutputMutator mutator = new TestOutputMutator(allocator);
rr.setup(null, mutator);
final Stopwatch watch = Stopwatch.createStarted();
int rowCount = 0;
// Drain the reader; next() returns 0 when the file is exhausted.
while ((rowCount = rr.next()) > 0) {
totalRowCount += rowCount;
}
System.out.println(String.format("Time completed: %s. ", watch.elapsed(TimeUnit.MILLISECONDS)));
rr.close();
}
allocator.close();
System.out.println(String.format("Total row count %s", totalRowCount));
}
// Specific tests call this overload; it is intentionally not a test itself.
// Delegates with value validation on and a LOGICAL query type.
public void testParquetFullEngineEventBased(boolean generateNew, String plan, String readEntries, String filename,
    int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props) throws Exception {
  testParquetFullEngineEventBased(true, generateNew, plan, readEntries, filename, numberOfTimesRead, props,
      QueryType.LOGICAL);
}
// Specific tests call this overload; it is intentionally not a test itself.
// Delegates with no read-entry substitution, value validation on, LOGICAL type.
public void testParquetFullEngineEventBased(boolean generateNew, String plan, String filename,
    int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props) throws Exception {
  testParquetFullEngineEventBased(true, generateNew, plan, null, filename, numberOfTimesRead, props,
      QueryType.LOGICAL);
}
// Specific tests call this method; it is intentionally not a test itself.
// Optionally generates the input file, loads the plan from the classpath,
// substitutes the read entries, runs the query, and validates via the listener.
public void testParquetFullEngineEventBased(boolean testValues, boolean generateNew, String plan,
    String readEntries, String filename,
    int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props,
    QueryType queryType) throws Exception {
  if (generateNew) {
    TestFileGenerator.generateParquetFile(filename, props);
  }
  final ParquetResultListener resultListener =
      new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
  final long startNanos = System.nanoTime();
  String planText = Files.toString(FileUtils.getResourceAsFile(plan), Charsets.UTF_8);
  // Substitute the read entries into the plan, allowing one plan file to serve many tests.
  if (readEntries != null) {
    planText = planText.replaceFirst("&REPLACED_IN_PARQUET_TEST&", readEntries);
  }
  testWithListener(queryType, planText, resultListener);
  resultListener.getResults();
  final long elapsedNanos = System.nanoTime() - startNanos;
  System.out.println(String.format("Took %f s to run query", (float) elapsedNanos / 1E9));
}
// LIMIT below the row count: exactly one record must come back.
@Test
public void testLimit() throws Exception {
  final List<QueryDataBatch> results = testSqlWithResults("SELECT * FROM cp.`/parquet/tpch/nation/01.parquet` LIMIT 1");
  int recordCount = 0;
  for (final QueryDataBatch batch : results) {
    recordCount += batch.getHeader().getDef().getRecordCount();
    batch.release();
  }
  assertTrue(String.format("Number of records in output is wrong: expected=%d, actual=%s", 1, recordCount), 1 == recordCount);
}
// LIMIT above the row count: all 9 records of the file must come back.
@Test
public void testLimitBeyondRowCount() throws Exception {
  final List<QueryDataBatch> results = testSqlWithResults("SELECT * FROM cp.`/parquet/tpch/nation/01.parquet` LIMIT 100");
  int recordCount = 0;
  for (final QueryDataBatch batch : results) {
    recordCount += batch.getHeader().getDef().getRecordCount();
    batch.release();
  }
  assertTrue(String.format("Number of records in output is wrong: expected=%d, actual=%s", 9, recordCount), 9 == recordCount);
}
// LIMIT spanning row-group boundaries (3 groups x 100 rows, LIMIT 225).
@Test
public void testLimitMultipleRowGroups() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(3, 100, 1024 * 1024, fields);
  populateFieldInfoMap(props);
  TestFileGenerator.generateParquetFile("/tmp/testLimit.parquet", props);
  final List<QueryDataBatch> results = testSqlWithResults("SELECT * FROM dfs.`/tmp/testLimit.parquet` LIMIT 225");
  int recordCount = 0;
  for (final QueryDataBatch batch : results) {
    recordCount += batch.getHeader().getDef().getRecordCount();
    batch.release();
  }
  assertTrue(String.format("Number of records in output is wrong: expected=%d, actual=%s", 225, recordCount), 225 == recordCount);
}
// LIMIT above the total row count across row groups: all 300 rows come back.
@Test
public void testLimitMultipleRowGroupsBeyondRowCount() throws Exception {
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(3, 100, 1024 * 1024, fields);
  populateFieldInfoMap(props);
  TestFileGenerator.generateParquetFile("/tmp/testLimit.parquet", props);
  final List<QueryDataBatch> results = testSqlWithResults("SELECT * FROM dfs.`/tmp/testLimit.parquet` LIMIT 500");
  int recordCount = 0;
  for (final QueryDataBatch batch : results) {
    recordCount += batch.getHeader().getDef().getRecordCount();
    batch.release();
  }
  assertTrue(String.format("Number of records in output is wrong: expected=%d, actual=%s", 300, recordCount), 300 == recordCount);
}
}
|
|
package de.danoeh.antennapod.gpoddernet;
import org.apache.commons.lang3.Validate;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthenticationException;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.BasicScheme;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import de.danoeh.antennapod.gpoddernet.model.GpodnetDevice;
import de.danoeh.antennapod.gpoddernet.model.GpodnetPodcast;
import de.danoeh.antennapod.gpoddernet.model.GpodnetSubscriptionChange;
import de.danoeh.antennapod.gpoddernet.model.GpodnetTag;
import de.danoeh.antennapod.gpoddernet.model.GpodnetUploadChangesResponse;
import de.danoeh.antennapod.preferences.GpodnetPreferences;
import de.danoeh.antennapod.service.download.AntennapodHttpClient;
/**
 * Communicates with the gpodder.net service.
 */
public class GpodnetService {
// Scheme used for every API request.
private static final String BASE_SCHEME = "https";
public static final String DEFAULT_BASE_HOST = "gpodder.net";
// Host is read from preferences once and fixed for this instance's lifetime.
private final String BASE_HOST;
private final HttpClient httpClient;
public GpodnetService() {
httpClient = AntennapodHttpClient.getHttpClient();
BASE_HOST = GpodnetPreferences.getHostname();
}
/**
 * Returns the [count] most used tags.
 */
public List<GpodnetTag> getTopTags(int count)
        throws GpodnetServiceException {
    final URI uri;
    try {
        uri = new URI(BASE_SCHEME, BASE_HOST, String.format(
                "/api/2/tags/%d.json", count), null);
    } catch (URISyntaxException e1) {
        e1.printStackTrace();
        throw new IllegalStateException(e1);
    }
    final String response = executeRequest(new HttpGet(uri));
    try {
        final JSONArray jsonTagList = new JSONArray(response);
        final int numTags = jsonTagList.length();
        final List<GpodnetTag> tags = new ArrayList<GpodnetTag>(numTags);
        for (int i = 0; i < numTags; i++) {
            final JSONObject tagObject = jsonTagList.getJSONObject(i);
            tags.add(new GpodnetTag(tagObject.getString("tag"), tagObject.getInt("usage")));
        }
        return tags;
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    }
}
/**
 * Returns the [count] most subscribed podcasts for the given tag.
 *
 * @throws IllegalArgumentException if tag is null
 */
public List<GpodnetPodcast> getPodcastsForTag(GpodnetTag tag, int count)
        throws GpodnetServiceException {
    Validate.notNull(tag);
    try {
        final String path = String.format("/api/2/tag/%s/%d.json", tag.getName(), count);
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, path, null);
        final String response = executeRequest(new HttpGet(uri));
        return readPodcastListFromJSONArray(new JSONArray(response));
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    }
}
/**
 * Returns the toplist of podcasts.
 *
 * @param count of elements that should be returned. Must be in range 1..100.
 * @throws IllegalArgumentException if count is out of range.
 */
public List<GpodnetPodcast> getPodcastToplist(int count)
        throws GpodnetServiceException {
    Validate.isTrue(count >= 1 && count <= 100, "Count must be in range 1..100");
    try {
        final String path = String.format("/toplist/%d.json", count);
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, path, null);
        final String response = executeRequest(new HttpGet(uri));
        return readPodcastListFromJSONArray(new JSONArray(response));
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    }
}
/**
 * Returns a list of suggested podcasts for the user that is currently
 * logged in.
 * <p/>
 * This method requires authentication.
 *
 * @param count The number of elements that should be returned. Must be in
 *              range 1..100.
 * @throws IllegalArgumentException if count is out of range.
 * @throws GpodnetServiceAuthenticationException If there is an authentication error.
 */
public List<GpodnetPodcast> getSuggestions(int count) throws GpodnetServiceException {
    Validate.isTrue(count >= 1 && count <= 100, "Count must be in range 1..100");
    try {
        final String path = String.format("/suggestions/%d.json", count);
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, path, null);
        final String response = executeRequest(new HttpGet(uri));
        return readPodcastListFromJSONArray(new JSONArray(response));
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    }
}
/**
 * Searches the podcast directory for a given string.
 *
 * @param query          The search query
 * @param scaledLogoSize The size of the logos that are returned by the search query.
 *                       Must be in range 1..256. If the value is out of range, the
 *                       default value defined by the gpodder.net API will be used.
 */
public List<GpodnetPodcast> searchPodcasts(String query, int scaledLogoSize)
        throws GpodnetServiceException {
    // Only request logo scaling when the size is within the API's valid range.
    String parameters = (scaledLogoSize > 0 && scaledLogoSize <= 256) ? String
            .format("q=%s&scale_logo=%d", query, scaledLogoSize) : String
            .format("q=%s", query);
    try {
        URI uri = new URI(BASE_SCHEME, null, BASE_HOST, -1, "/search.json",
                parameters, null);
        // (A leftover debug println of the URI was removed here.)
        HttpGet request = new HttpGet(uri);
        String response = executeRequest(request);
        JSONArray jsonArray = new JSONArray(response);
        return readPodcastListFromJSONArray(jsonArray);
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    }
}
/**
 * Returns all devices of a given user.
 * <p/>
 * This method requires authentication.
 *
 * @param username The username. Must be the same user as the one which is
 *                 currently logged in.
 * @throws IllegalArgumentException If username is null.
 * @throws GpodnetServiceAuthenticationException If there is an authentication error.
 */
public List<GpodnetDevice> getDevices(String username)
        throws GpodnetServiceException {
    Validate.notNull(username);
    try {
        final String path = String.format("/api/2/devices/%s.json", username);
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, path, null);
        final String response = executeRequest(new HttpGet(uri));
        return readDeviceListFromJSONArray(new JSONArray(response));
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    }
}
/**
 * Configures the device of a given user.
 * <p/>
 * This method requires authentication.
 *
 * @param username The username. Must be the same user as the one which is
 *                 currently logged in.
 * @param deviceId The ID of the device that should be configured.
 * @throws IllegalArgumentException If username or deviceId is null.
 * @throws GpodnetServiceAuthenticationException If there is an authentication error.
 */
public void configureDevice(String username, String deviceId,
        String caption, GpodnetDevice.DeviceType type)
        throws GpodnetServiceException {
    Validate.notNull(username);
    Validate.notNull(deviceId);
    try {
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, String.format(
                "/api/2/devices/%s/%s.json", username, deviceId), null);
        final HttpPost request = new HttpPost(uri);
        // Only attach a JSON body when there is at least one attribute to change.
        if (caption != null || type != null) {
            final JSONObject content = new JSONObject();
            if (caption != null) {
                content.put("caption", caption);
            }
            if (type != null) {
                content.put("type", type.toString());
            }
            final StringEntity entity = new StringEntity(
                    content.toString(), "UTF-8");
            entity.setContentType("application/json");
            request.setEntity(entity);
        }
        executeRequest(request);
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalArgumentException(e);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    }
}
/**
 * Returns the subscriptions of a specific device.
 * <p/>
 * This method requires authentication.
 *
 * @param username The username. Must be the same user as the one which is
 *                 currently logged in.
 * @param deviceId The ID of the device whose subscriptions should be returned.
 * @return A list of subscriptions in OPML format.
 * @throws IllegalArgumentException If username or deviceId is null.
 * @throws GpodnetServiceAuthenticationException If there is an authentication error.
 */
public String getSubscriptionsOfDevice(String username, String deviceId)
        throws GpodnetServiceException {
    Validate.notNull(username);
    Validate.notNull(deviceId);
    try {
        final String path = String.format("/subscriptions/%s/%s.opml", username, deviceId);
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, path, null);
        return executeRequest(new HttpGet(uri));
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalArgumentException(e);
    }
}
/**
 * Returns all subscriptions of a specific user.
 * <p/>
 * This method requires authentication.
 *
 * @param username The username. Must be the same user as the one which is
 *                 currently logged in.
 * @return A list of subscriptions in OPML format.
 * @throws IllegalArgumentException If username is null.
 * @throws GpodnetServiceAuthenticationException If there is an authentication error.
 */
public String getSubscriptionsOfUser(String username)
        throws GpodnetServiceException {
    Validate.notNull(username);
    try {
        final String path = String.format("/subscriptions/%s.opml", username);
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, path, null);
        return executeRequest(new HttpGet(uri));
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalArgumentException(e);
    }
}
/**
 * Uploads the subscriptions of a specific device.
 * <p/>
 * This method requires authentication.
 *
 * @param username      The username. Must be the same user as the one which is
 *                      currently logged in.
 * @param deviceId      The ID of the device whose subscriptions should be updated.
 * @param subscriptions A list of feed URLs containing all subscriptions of the
 *                      device.
 * @throws IllegalArgumentException If username, deviceId or subscriptions is null.
 * @throws GpodnetServiceAuthenticationException If there is an authentication error.
 */
public void uploadSubscriptions(String username, String deviceId,
        List<String> subscriptions) throws GpodnetServiceException {
    if (username == null || deviceId == null || subscriptions == null) {
        throw new IllegalArgumentException(
                "Username, device ID and subscriptions must not be null");
    }
    try {
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, String.format(
                "/subscriptions/%s/%s.txt", username, deviceId), null);
        // The upload body format is one feed URL per line.
        final StringBuilder body = new StringBuilder();
        for (String subscription : subscriptions) {
            body.append(subscription).append("\n");
        }
        final HttpPut request = new HttpPut(uri);
        request.setEntity(new StringEntity(body.toString(), "UTF-8"));
        executeRequest(request);
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    }
}
/**
 * Updates the subscription list of a specific device.
 * <p/>
 * This method requires authentication.
 *
 * @param username The username. Must be the same user as the one which is
 *                 currently logged in.
 * @param deviceId The ID of the device whose subscriptions should be updated.
 * @param added    Collection of feed URLs of added feeds. This Collection MUST NOT contain any duplicates
 * @param removed  Collection of feed URLs of removed feeds. This Collection MUST NOT contain any duplicates
 * @return a GpodnetUploadChangesResponse. See {@link de.danoeh.antennapod.gpoddernet.model.GpodnetUploadChangesResponse}
 * for details.
 * @throws java.lang.IllegalArgumentException if username, deviceId, added or removed is null.
 * @throws de.danoeh.antennapod.gpoddernet.GpodnetServiceException if added or removed contain duplicates or if there
 *                                                                is an authentication error.
 */
public GpodnetUploadChangesResponse uploadChanges(String username, String deviceId, Collection<String> added,
        Collection<String> removed) throws GpodnetServiceException {
    Validate.notNull(username);
    Validate.notNull(deviceId);
    Validate.notNull(added);
    Validate.notNull(removed);
    try {
        final URI uri = new URI(BASE_SCHEME, BASE_HOST, String.format(
                "/api/2/subscriptions/%s/%s.json", username, deviceId), null);
        // Body is a JSON object with "add" and "remove" arrays of feed URLs.
        final JSONObject changes = new JSONObject();
        changes.put("add", new JSONArray(added));
        changes.put("remove", new JSONArray(removed));
        final HttpPost request = new HttpPost(uri);
        request.setEntity(new StringEntity(changes.toString(), "UTF-8"));
        final String response = executeRequest(request);
        return GpodnetUploadChangesResponse.fromJSONObject(response);
    } catch (URISyntaxException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    } catch (JSONException e) {
        e.printStackTrace();
        throw new GpodnetServiceException(e);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
        throw new IllegalStateException(e);
    }
}
/**
* Returns all subscription changes of a specific device.
* <p/>
* This method requires authentication.
*
* @param username The username. Must be the same user as the one which is
* currently logged in.
* @param deviceId The ID of the device whose subscription changes should be
* downloaded.
* @param timestamp A timestamp that can be used to receive all changes since a
* specific point in time.
* @throws IllegalArgumentException If username or deviceId is null.
* @throws GpodnetServiceAuthenticationException If there is an authentication error.
*/
public GpodnetSubscriptionChange getSubscriptionChanges(String username,
String deviceId, long timestamp) throws GpodnetServiceException {
Validate.notNull(username);
Validate.notNull(deviceId);
String params = String.format("since=%d", timestamp);
String path = String.format("/api/2/subscriptions/%s/%s.json",
username, deviceId);
try {
URI uri = new URI(BASE_SCHEME, null, BASE_HOST, -1, path, params,
null);
HttpGet request = new HttpGet(uri);
String response = executeRequest(request);
JSONObject changes = new JSONObject(response);
return readSubscriptionChangesFromJSONObject(changes);
} catch (URISyntaxException e) {
e.printStackTrace();
throw new IllegalStateException(e);
} catch (JSONException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
}
}
/**
* Logs in a specific user. This method must be called if any of the methods
* that require authentication is used.
*
* @throws IllegalArgumentException If username or password is null.
*/
public void authenticate(String username, String password)
throws GpodnetServiceException {
Validate.notNull(username);
Validate.notNull(password);
URI uri;
try {
uri = new URI(BASE_SCHEME, BASE_HOST, String.format(
"/api/2/auth/%s/login.json", username), null);
} catch (URISyntaxException e) {
e.printStackTrace();
throw new GpodnetServiceException();
}
HttpPost request = new HttpPost(uri);
executeRequestWithAuthentication(request, username, password);
}
/**
* Shuts down the GpodnetService's HTTP client. The service will be shut down in a separate thread to avoid
* NetworkOnMainThreadExceptions.
*/
public void shutdown() {
new Thread() {
@Override
public void run() {
AntennapodHttpClient.cleanup();
}
}.start();
}
private String executeRequest(HttpRequestBase request)
throws GpodnetServiceException {
Validate.notNull(request);
String responseString = null;
HttpResponse response = null;
try {
response = httpClient.execute(request);
checkStatusCode(response);
responseString = getStringFromEntity(response.getEntity());
} catch (ClientProtocolException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
} catch (IOException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
} finally {
if (response != null) {
try {
response.getEntity().consumeContent();
} catch (IOException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
}
}
}
return responseString;
}
private String executeRequestWithAuthentication(HttpRequestBase request,
String username, String password) throws GpodnetServiceException {
if (request == null || username == null || password == null) {
throw new IllegalArgumentException(
"request and credentials must not be null");
}
String result = null;
HttpResponse response = null;
try {
Header auth = new BasicScheme().authenticate(
new UsernamePasswordCredentials(username, password),
request);
request.addHeader(auth);
response = httpClient.execute(request);
checkStatusCode(response);
result = getStringFromEntity(response.getEntity());
} catch (ClientProtocolException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
} catch (IOException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
} catch (AuthenticationException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
} finally {
if (response != null) {
try {
response.getEntity().consumeContent();
} catch (IOException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
}
}
}
return result;
}
private String getStringFromEntity(HttpEntity entity)
throws GpodnetServiceException {
Validate.notNull(entity);
ByteArrayOutputStream outputStream;
int contentLength = (int) entity.getContentLength();
if (contentLength > 0) {
outputStream = new ByteArrayOutputStream(contentLength);
} else {
outputStream = new ByteArrayOutputStream();
}
try {
byte[] buffer = new byte[8 * 1024];
InputStream in = entity.getContent();
int count;
while ((count = in.read(buffer)) > 0) {
outputStream.write(buffer, 0, count);
}
} catch (IOException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
}
// System.out.println(outputStream.toString());
return outputStream.toString();
}
private void checkStatusCode(HttpResponse response)
throws GpodnetServiceException {
Validate.notNull(response);
int responseCode = response.getStatusLine().getStatusCode();
if (responseCode != HttpStatus.SC_OK) {
if (responseCode == HttpStatus.SC_UNAUTHORIZED) {
throw new GpodnetServiceAuthenticationException("Wrong username or password");
} else {
throw new GpodnetServiceBadStatusCodeException(
"Bad response code: " + responseCode, responseCode);
}
}
}
private List<GpodnetPodcast> readPodcastListFromJSONArray(JSONArray array)
throws JSONException {
Validate.notNull(array);
List<GpodnetPodcast> result = new ArrayList<GpodnetPodcast>(
array.length());
for (int i = 0; i < array.length(); i++) {
result.add(readPodcastFromJSONObject(array.getJSONObject(i)));
}
return result;
}
private GpodnetPodcast readPodcastFromJSONObject(JSONObject object)
throws JSONException {
String url = object.getString("url");
String title;
Object titleObj = object.opt("title");
if (titleObj != null && titleObj instanceof String) {
title = (String) titleObj;
} else {
title = url;
}
String description;
Object descriptionObj = object.opt("description");
if (descriptionObj != null && descriptionObj instanceof String) {
description = (String) descriptionObj;
} else {
description = "";
}
int subscribers = object.getInt("subscribers");
Object logoUrlObj = object.opt("logo_url");
String logoUrl = (logoUrlObj instanceof String) ? (String) logoUrlObj
: null;
if (logoUrl == null) {
Object scaledLogoUrl = object.opt("scaled_logo_url");
if (scaledLogoUrl != null && scaledLogoUrl instanceof String) {
logoUrl = (String) scaledLogoUrl;
}
}
String website = null;
Object websiteObj = object.opt("website");
if (websiteObj != null && websiteObj instanceof String) {
website = (String) websiteObj;
}
String mygpoLink = object.getString("mygpo_link");
return new GpodnetPodcast(url, title, description, subscribers,
logoUrl, website, mygpoLink);
}
private List<GpodnetDevice> readDeviceListFromJSONArray(JSONArray array)
throws JSONException {
Validate.notNull(array);
List<GpodnetDevice> result = new ArrayList<GpodnetDevice>(
array.length());
for (int i = 0; i < array.length(); i++) {
result.add(readDeviceFromJSONObject(array.getJSONObject(i)));
}
return result;
}
private GpodnetDevice readDeviceFromJSONObject(JSONObject object)
throws JSONException {
String id = object.getString("id");
String caption = object.getString("caption");
String type = object.getString("type");
int subscriptions = object.getInt("subscriptions");
return new GpodnetDevice(id, caption, type, subscriptions);
}
private GpodnetSubscriptionChange readSubscriptionChangesFromJSONObject(
JSONObject object) throws JSONException {
Validate.notNull(object);
List<String> added = new LinkedList<String>();
JSONArray jsonAdded = object.getJSONArray("add");
for (int i = 0; i < jsonAdded.length(); i++) {
added.add(jsonAdded.getString(i));
}
List<String> removed = new LinkedList<String>();
JSONArray jsonRemoved = object.getJSONArray("remove");
for (int i = 0; i < jsonRemoved.length(); i++) {
removed.add(jsonRemoved.getString(i));
}
long timestamp = object.getLong("timestamp");
return new GpodnetSubscriptionChange(added, removed, timestamp);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheExistsException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.GridCachePluginContext;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cluster.ChangeGlobalStateFinishMessage;
import org.apache.ignite.internal.processors.cluster.ChangeGlobalStateMessage;
import org.apache.ignite.internal.processors.cluster.DiscoveryDataClusterState;
import org.apache.ignite.internal.processors.query.QuerySchema;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.internal.processors.query.schema.SchemaOperationException;
import org.apache.ignite.internal.util.GridConcurrentHashSet;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.CachePluginContext;
import org.apache.ignite.plugin.CachePluginProvider;
import org.apache.ignite.plugin.PluginProvider;
import org.apache.ignite.spi.discovery.DiscoveryDataBag;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.cache.CacheMode.LOCAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.events.EventType.EVT_NODE_JOINED;
import static org.apache.ignite.internal.GridComponent.DiscoveryDataExchangeType.CACHE_PROC;
/**
* Logic related to cache discovery data processing.
*/
class ClusterCachesInfo {
    /** Kernal context. */
    private final GridKernalContext ctx;
    /** Dynamic caches. */
    private final ConcurrentMap<String, DynamicCacheDescriptor> registeredCaches = new ConcurrentHashMap<>();
    /** Registered cache groups, keyed by group ID. */
    private final ConcurrentMap<Integer, CacheGroupDescriptor> registeredCacheGrps = new ConcurrentHashMap<>();
    /** Cache templates. */
    private final ConcurrentMap<String, DynamicCacheDescriptor> registeredTemplates = new ConcurrentHashMap<>();
    /** Caches currently being restarted. */
    private final Collection<String> restartingCaches = new GridConcurrentHashSet<>();
    /** Logger. */
    private final IgniteLogger log;
    /** Snapshot of caches/groups taken on client disconnect (used for reconnect). */
    private CachesOnDisconnect cachesOnDisconnect;
    /** Local cache info */
    private CacheJoinNodeDiscoveryData joinDiscoData;
    /** Cluster cache info */
    private GridData gridData;
    /** Caches to start on local join — presumably consumed after join completes; verify against callers. */
    private List<T2<DynamicCacheDescriptor, NearCacheConfiguration>> locJoinStartCaches = Collections.emptyList();
    /** Local configurations kept for cluster activation — NOTE(review): not read in this chunk; confirm usage. */
    private Map<String, T2<CacheConfiguration, NearCacheConfiguration>> locCfgsForActivation = Collections.emptyMap();
    /** Pending client reconnect discovery data, keyed by node ID. */
    private Map<UUID, CacheClientReconnectDiscoveryData> clientReconnectReqs;
    /** {@code True} if joined cluster while cluster state change was in progress. */
    private boolean joinOnTransition;
    /**
     * Creates cluster caches info bound to the given kernal context.
     *
     * @param ctx Kernal context; also supplies the logger for this class.
     */
    public ClusterCachesInfo(GridKernalContext ctx) {
        this.ctx = ctx;
        log = ctx.log(getClass());
    }
/**
* @param joinDiscoData Information about configured caches and templates.
* @throws IgniteCheckedException If configuration validation failed.
*/
public void onStart(CacheJoinNodeDiscoveryData joinDiscoData) throws IgniteCheckedException {
this.joinDiscoData = joinDiscoData;
Map<String, CacheConfiguration> grpCfgs = new HashMap<>();
for (CacheJoinNodeDiscoveryData.CacheInfo info : joinDiscoData.caches().values()) {
if (info.cacheData().config().getGroupName() == null)
continue;
CacheConfiguration ccfg = grpCfgs.get(info.cacheData().config().getGroupName());
if (ccfg == null)
grpCfgs.put(info.cacheData().config().getGroupName(), info.cacheData().config());
else
validateCacheGroupConfiguration(ccfg, info.cacheData().config());
}
String conflictErr = processJoiningNode(joinDiscoData, ctx.localNodeId(), true);
if (conflictErr != null)
throw new IgniteCheckedException("Failed to start configured cache. " + conflictErr);
}
/**
* @param cacheName Cache name.
* @param grpName Group name.
* @return Group ID.
*/
private int cacheGroupId(String cacheName, @Nullable String grpName) {
assert cacheName != null;
return grpName != null ? CU.cacheId(grpName) : CU.cacheId(cacheName);
}
    /**
     * Validates locally configured caches against the cluster-wide cache data
     * collected during join, and lets cache plugins validate local configs
     * against a remote node. Clears the collected grid data when done.
     *
     * @param checkConsistency {@code True} if need check cache configurations consistency.
     * @throws IgniteCheckedException If failed.
     */
    public void onKernalStart(boolean checkConsistency) throws IgniteCheckedException {
        // A conflict recorded during discovery data processing is fatal on start.
        if (gridData != null && gridData.conflictErr != null)
            throw new IgniteCheckedException(gridData.conflictErr);
        if (gridData != null && gridData.joinDiscoData != null) {
            CacheJoinNodeDiscoveryData joinDiscoData = gridData.joinDiscoData;
            for (CacheJoinNodeDiscoveryData.CacheInfo locCacheInfo : joinDiscoData.caches().values()) {
                CacheConfiguration locCfg = locCacheInfo.cacheData().config();
                CacheData cacheData = gridData.gridData.caches().get(locCfg.getName());
                if (cacheData != null) {
                    // A cache created via SQL (CREATE TABLE) and one created via the
                    // Ignite API must not be mixed under the same name.
                    if (!F.eq(cacheData.sql(), locCacheInfo.sql())) {
                        throw new IgniteCheckedException("Cache configuration mismatch (local cache was created " +
                            "via " + (locCacheInfo.sql() ? "CREATE TABLE" : "Ignite API") + ", while remote cache " +
                            "was created via " + (cacheData.sql() ? "CREATE TABLE" : "Ignite API") + "): " +
                            locCacheInfo.cacheData().config().getName());
                    }
                    if (checkConsistency) {
                        checkCache(locCacheInfo, cacheData, cacheData.receivedFrom());
                        ClusterNode rmt = ctx.discovery().node(cacheData.receivedFrom());
                        if (rmt == null) {
                            // Originating node already left: fall back to any other
                            // server node that is an affinity node for this cache.
                            for (ClusterNode node : ctx.discovery().localJoin().discoCache().serverNodes()) {
                                if (!node.isLocal() && ctx.discovery().cacheAffinityNode(node, locCfg.getName())) {
                                    rmt = node;
                                    break;
                                }
                            }
                        }
                        if (rmt != null) {
                            // Give each cache plugin a chance to validate the local
                            // configuration against the remote one.
                            for (PluginProvider p : ctx.plugins().allProviders()) {
                                CachePluginContext pluginCtx = new GridCachePluginContext(ctx, locCfg);
                                CachePluginProvider provider = p.createCacheProvider(pluginCtx);
                                if (provider != null)
                                    provider.validateRemote(locCfg, cacheData.cacheConfiguration(), rmt);
                            }
                        }
                    }
                }
                if (checkConsistency)
                    validateStartCacheConfiguration(locCfg);
            }
        }
        // Join-time grid data is no longer needed after validation.
        gridData = null;
    }
    /**
     * Checks that remote caches has configuration compatible with the local.
     * Attributes marked "fail" below abort the join with an exception; the rest
     * only produce a warning (see the final boolean passed to
     * {@code CU.checkAttributeMismatch}).
     *
     * @param locInfo Local configuration.
     * @param rmtData Remote configuration.
     * @param rmt Remote node.
     * @throws IgniteCheckedException If check failed.
     */
    @SuppressWarnings("unchecked")
    private void checkCache(CacheJoinNodeDiscoveryData.CacheInfo locInfo, CacheData rmtData, UUID rmt)
        throws IgniteCheckedException {
        GridCacheAttributes rmtAttr = new GridCacheAttributes(rmtData.cacheConfiguration());
        GridCacheAttributes locAttr = new GridCacheAttributes(locInfo.cacheData().config());
        // Cache mode and group name must match on all nodes (fail on mismatch).
        CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "cacheMode", "Cache mode",
            locAttr.cacheMode(), rmtAttr.cacheMode(), true);
        CU.checkAttributeMismatch(log, rmtAttr.groupName(), rmt, "groupName", "Cache group name",
            locAttr.groupName(), rmtAttr.groupName(), true);
        // The remaining attributes only matter for distributed (non-LOCAL) caches.
        if (rmtAttr.cacheMode() != LOCAL) {
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "interceptor", "Cache Interceptor",
                locAttr.interceptorClassName(), rmtAttr.interceptorClassName(), true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "atomicityMode",
                "Cache atomicity mode", locAttr.atomicityMode(), rmtAttr.atomicityMode(), true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "cachePreloadMode",
                "Cache preload mode", locAttr.cacheRebalanceMode(), rmtAttr.cacheRebalanceMode(), true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "topologyValidator",
                "Cache topology validator", locAttr.topologyValidatorClassName(), rmtAttr.topologyValidatorClassName(), true);
            ClusterNode rmtNode = ctx.discovery().node(rmt);
            // Store factory must only match when both ends are affinity nodes for the cache.
            if (CU.affinityNode(ctx.discovery().localNode(), locInfo.cacheData().config().getNodeFilter())
                && rmtNode != null && CU.affinityNode(rmtNode, rmtData.cacheConfiguration().getNodeFilter())) {
                CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "storeFactory", "Store factory",
                    locAttr.storeFactoryClassName(), rmtAttr.storeFactoryClassName(), true);
            }
            // Affinity configuration (fail on mismatch).
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "cacheAffinity", "Cache affinity",
                locAttr.cacheAffinityClassName(), rmtAttr.cacheAffinityClassName(), true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "cacheAffinityMapper",
                "Cache affinity mapper", locAttr.cacheAffinityMapperClassName(),
                rmtAttr.cacheAffinityMapperClassName(), true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "affinityPartitionsCount",
                "Affinity partitions count", locAttr.affinityPartitionsCount(),
                rmtAttr.affinityPartitionsCount(), true);
            // Eviction configuration (fail on mismatch).
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "evictionFilter", "Eviction filter",
                locAttr.evictionFilterClassName(), rmtAttr.evictionFilterClassName(), true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "evictionPolicy", "Eviction policy",
                locAttr.evictionPolicyClassName(), rmtAttr.evictionPolicyClassName(), true);
            // Transaction / rebalance tuning (warn only on mismatch).
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "transactionManagerLookup",
                "Transaction manager lookup", locAttr.transactionManagerLookupClassName(),
                rmtAttr.transactionManagerLookupClassName(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "defaultLockTimeout",
                "Default lock timeout", locAttr.defaultLockTimeout(), rmtAttr.defaultLockTimeout(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "preloadBatchSize",
                "Preload batch size", locAttr.rebalanceBatchSize(), rmtAttr.rebalanceBatchSize(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "rebalanceDelay",
                "Rebalance delay", locAttr.rebalanceDelay(), rmtAttr.rebalanceDelay(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "rebalanceBatchesPrefetchCount",
                "Rebalance batches prefetch count", locAttr.rebalanceBatchesPrefetchCount(),
                rmtAttr.rebalanceBatchesPrefetchCount(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "rebalanceOrder",
                "Rebalance order", locAttr.rebalanceOrder(), rmtAttr.rebalanceOrder(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "rebalanceThrottle",
                "Rebalance throttle", locAttr.rebalanceThrottle(), rmtAttr.rebalanceThrottle(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "rebalanceTimeout",
                "Rebalance timeout", locAttr.rebalanceTimeout(), rmtAttr.rebalanceTimeout(), false);
            // Write synchronization must match (fail); write-behind tuning warns only.
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeSynchronizationMode",
                "Write synchronization mode", locAttr.writeSynchronization(), rmtAttr.writeSynchronization(),
                true);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeBehindBatchSize",
                "Write behind batch size", locAttr.writeBehindBatchSize(), rmtAttr.writeBehindBatchSize(),
                false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeBehindCoalescing",
                "Write behind coalescing", locAttr.writeBehindCoalescing(), rmtAttr.writeBehindCoalescing(),
                false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeBehindEnabled",
                "Write behind enabled", locAttr.writeBehindEnabled(), rmtAttr.writeBehindEnabled(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeBehindFlushFrequency",
                "Write behind flush frequency", locAttr.writeBehindFlushFrequency(),
                rmtAttr.writeBehindFlushFrequency(), false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeBehindFlushSize",
                "Write behind flush size", locAttr.writeBehindFlushSize(), rmtAttr.writeBehindFlushSize(),
                false);
            CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "writeBehindFlushThreadCount",
                "Write behind flush thread count", locAttr.writeBehindFlushThreadCount(),
                rmtAttr.writeBehindFlushThreadCount(), false);
            // Extra checks that only apply to PARTITIONED caches.
            if (locAttr.cacheMode() == PARTITIONED) {
                CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "nearEvictionPolicy",
                    "Near eviction policy", locAttr.nearEvictionPolicyClassName(),
                    rmtAttr.nearEvictionPolicyClassName(), false);
                CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "affinityIncludeNeighbors",
                    "Affinity include neighbors", locAttr.affinityIncludeNeighbors(),
                    rmtAttr.affinityIncludeNeighbors(), true);
                CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "affinityKeyBackups",
                    "Affinity key backups", locAttr.affinityKeyBackups(),
                    rmtAttr.affinityKeyBackups(), true);
            }
        }
    }
/**
* @param msg Message.
* @param node Node sent message.
*/
public void onClientCacheChange(ClientCacheChangeDiscoveryMessage msg, ClusterNode node) {
Map<Integer, Boolean> startedCaches = msg.startedCaches();
if (startedCaches != null) {
for (Map.Entry<Integer, Boolean> e : startedCaches.entrySet()) {
for (DynamicCacheDescriptor desc : registeredCaches.values()) {
if (e.getKey().equals(desc.cacheId())) {
ctx.discovery().addClientNode(desc.cacheName(), node.id(), e.getValue());
break;
}
}
}
}
Set<Integer> closedCaches = msg.closedCaches();
if (closedCaches != null) {
for (Integer cacheId : closedCaches) {
for (DynamicCacheDescriptor desc : registeredCaches.values()) {
if (cacheId.equals(desc.cacheId())) {
ctx.discovery().onClientCacheClose(desc.cacheName(), node.id());
break;
}
}
}
}
}
    /**
     * Entry point for a batch of dynamic cache change requests. Requests are only
     * processed when the cluster is active and not transitioning; otherwise every
     * pending start/stop future in the batch is completed with an error.
     *
     * @param batch Cache change request.
     * @param topVer Topology version.
     * @return {@code True} if minor topology version should be increased.
     */
    public boolean onCacheChangeRequested(DynamicCacheChangeBatch batch, AffinityTopologyVersion topVer) {
        DiscoveryDataClusterState state = ctx.state().clusterState();
        if (state.active() && !state.transition()) {
            ExchangeActions exchangeActions = new ExchangeActions();
            CacheChangeProcessResult res = processCacheChangeRequests(exchangeActions,
                batch.requests(),
                topVer,
                false);
            if (res.needExchange) {
                assert !exchangeActions.empty() : exchangeActions;
                batch.exchangeActions(exchangeActions);
            }
            return res.needExchange;
        }
        else {
            // Cluster state change in progress: fail all requests in the batch so
            // that client-side futures do not hang.
            IgniteCheckedException err = new IgniteCheckedException("Failed to start/stop cache, cluster state change " +
                "is in progress.");
            for (DynamicCacheChangeRequest req : batch.requests()) {
                if (req.template()) {
                    // Template registration futures complete without error.
                    ctx.cache().completeTemplateAddFuture(req.startCacheConfiguration().getName(),
                        req.deploymentId());
                }
                else
                    ctx.cache().completeCacheStartFuture(req, false, err);
            }
            return false;
        }
    }
    /**
     * Processes start/stop/reset requests: registers or removes cache and group
     * descriptors, builds exchange actions for requests that require a partition
     * map exchange, and asynchronously completes futures of requests that do not.
     *
     * @param exchangeActions Exchange actions to update.
     * @param reqs Requests.
     * @param topVer Topology version.
     * @param persistedCfgs {@code True} if process start of persisted caches during cluster activation.
     * @return Process result.
     */
    private CacheChangeProcessResult processCacheChangeRequests(
        ExchangeActions exchangeActions,
        Collection<DynamicCacheChangeRequest> reqs,
        AffinityTopologyVersion topVer,
        boolean persistedCfgs) {
        CacheChangeProcessResult res = new CacheChangeProcessResult();
        // Requests that need no exchange; their futures are completed asynchronously below.
        final List<T2<DynamicCacheChangeRequest, AffinityTopologyVersion>> reqsToComplete = new ArrayList<>();
        for (DynamicCacheChangeRequest req : reqs) {
            // Template registration: record the descriptor, no exchange needed.
            if (req.template()) {
                CacheConfiguration ccfg = req.startCacheConfiguration();
                assert ccfg != null : req;
                DynamicCacheDescriptor desc = registeredTemplates.get(req.cacheName());
                if (desc == null) {
                    DynamicCacheDescriptor templateDesc = new DynamicCacheDescriptor(ctx,
                        ccfg,
                        req.cacheType(),
                        null,
                        true,
                        req.initiatingNodeId(),
                        false,
                        false,
                        req.deploymentId(),
                        req.schema());
                    DynamicCacheDescriptor old = registeredTemplates().put(ccfg.getName(), templateDesc);
                    assert old == null;
                    res.addedDescs.add(templateDesc);
                }
                if (!persistedCfgs)
                    ctx.cache().completeTemplateAddFuture(ccfg.getName(), req.deploymentId());
                continue;
            }
            assert !req.clientStartOnly() : req;
            DynamicCacheDescriptor desc = registeredCaches.get(req.cacheName());
            boolean needExchange = false;
            boolean clientCacheStart = false;
            AffinityTopologyVersion waitTopVer = null;
            if (req.start()) {
                // Starting a new cache.
                if (desc == null) {
                    String conflictErr = checkCacheConflict(req.startCacheConfiguration());
                    if (conflictErr != null) {
                        U.warn(log, "Ignore cache start request. " + conflictErr);
                        IgniteCheckedException err = new IgniteCheckedException("Failed to start " +
                            "cache. " + conflictErr);
                        if (persistedCfgs)
                            res.errs.add(err);
                        else
                            ctx.cache().completeCacheStartFuture(req, false, err);
                        continue;
                    }
                    if (req.clientStartOnly()) {
                        assert !persistedCfgs;
                        // Client-only start of a cache that was never started: fail the future.
                        ctx.cache().completeCacheStartFuture(req, false, new IgniteCheckedException("Failed to start " +
                            "client cache (a cache with the given name is not started): " + req.cacheName()));
                    }
                    else {
                        SchemaOperationException err = QueryUtils.checkQueryEntityConflicts(
                            req.startCacheConfiguration(), registeredCaches.values());
                        if (err != null) {
                            if (persistedCfgs)
                                res.errs.add(err);
                            else
                                ctx.cache().completeCacheStartFuture(req, false, err);
                            continue;
                        }
                        CacheConfiguration<?, ?> ccfg = req.startCacheConfiguration();
                        assert req.cacheType() != null : req;
                        assert F.eq(ccfg.getName(), req.cacheName()) : req;
                        int cacheId = CU.cacheId(req.cacheName());
                        // Register (or join) the cache group, then the cache descriptor itself.
                        CacheGroupDescriptor grpDesc = registerCacheGroup(exchangeActions,
                            topVer,
                            ccfg,
                            cacheId,
                            req.initiatingNodeId(),
                            req.deploymentId());
                        DynamicCacheDescriptor startDesc = new DynamicCacheDescriptor(ctx,
                            ccfg,
                            req.cacheType(),
                            grpDesc,
                            false,
                            req.initiatingNodeId(),
                            false,
                            req.sql(),
                            req.deploymentId(),
                            req.schema());
                        DynamicCacheDescriptor old = registeredCaches.put(ccfg.getName(), startDesc);
                        restartingCaches.remove(ccfg.getName());
                        assert old == null;
                        ctx.discovery().setCacheFilter(
                            startDesc.cacheId(),
                            grpDesc.groupId(),
                            ccfg.getName(),
                            ccfg.getNearConfiguration() != null);
                        if (!persistedCfgs) {
                            ctx.discovery().addClientNode(req.cacheName(),
                                req.initiatingNodeId(),
                                req.nearCacheConfiguration() != null);
                        }
                        res.addedDescs.add(startDesc);
                        exchangeActions.addCacheToStart(req, startDesc);
                        needExchange = true;
                    }
                }
                else {
                    assert !persistedCfgs;
                    assert req.initiatingNodeId() != null : req;
                    if (req.failIfExists()) {
                        ctx.cache().completeCacheStartFuture(req, false,
                            new CacheExistsException("Failed to start cache " +
                                "(a cache with the same name is already started): " + req.cacheName()));
                    }
                    else {
                        // Cache already exists, it is possible client cache is needed.
                        ClusterNode node = ctx.discovery().node(req.initiatingNodeId());
                        boolean clientReq = node != null &&
                            !ctx.discovery().cacheAffinityNode(node, req.cacheName());
                        if (clientReq) {
                            ctx.discovery().addClientNode(req.cacheName(),
                                req.initiatingNodeId(),
                                req.nearCacheConfiguration() != null);
                            if (node.id().equals(req.initiatingNodeId())) {
                                desc.clientCacheStartVersion(topVer);
                                clientCacheStart = true;
                                ctx.discovery().clientCacheStartEvent(req.requestId(), F.asMap(req.cacheName(), req), null);
                            }
                        }
                    }
                }
                // Determine which topology version the start future must wait for
                // before it can be completed (no exchange case).
                if (!needExchange && !clientCacheStart && desc != null) {
                    if (desc.clientCacheStartVersion() != null)
                        waitTopVer = desc.clientCacheStartVersion();
                    else {
                        AffinityTopologyVersion nodeStartVer =
                            new AffinityTopologyVersion(ctx.discovery().localNode().order(), 0);
                        if (desc.startTopologyVersion() != null)
                            waitTopVer = desc.startTopologyVersion();
                        else
                            waitTopVer = desc.receivedFromStartVersion();
                        if (waitTopVer == null || nodeStartVer.compareTo(waitTopVer) > 0)
                            waitTopVer = nodeStartVer;
                    }
                }
            }
            else if (req.resetLostPartitions()) {
                if (desc != null) {
                    needExchange = true;
                    exchangeActions.addCacheToResetLostPartitions(req, desc);
                }
            }
            else if (req.stop()) {
                if (desc != null) {
                    // SQL-created caches may only be dropped via SQL, and vice versa.
                    if (req.sql() && !desc.sql()) {
                        ctx.cache().completeCacheStartFuture(req, false,
                            new IgniteCheckedException("Only cache created with CREATE TABLE may be removed with " +
                                "DROP TABLE [cacheName=" + req.cacheName() + ']'));
                        continue;
                    }
                    if (!req.sql() && desc.sql()) {
                        ctx.cache().completeCacheStartFuture(req, false,
                            new IgniteCheckedException("Only cache created with cache API may be removed with " +
                                "direct call to destroyCache [cacheName=" + req.cacheName() + ']'));
                        continue;
                    }
                    DynamicCacheDescriptor old = registeredCaches.remove(req.cacheName());
                    if (req.restart())
                        restartingCaches.add(req.cacheName());
                    assert old != null && old == desc : "Dynamic cache map was concurrently modified [req=" + req + ']';
                    ctx.discovery().removeCacheFilter(req.cacheName());
                    needExchange = true;
                    exchangeActions.addCacheToStop(req, desc);
                    CacheGroupDescriptor grpDesc = registeredCacheGrps.get(desc.groupId());
                    assert grpDesc != null && grpDesc.groupId() == desc.groupId() : desc;
                    grpDesc.onCacheStopped(desc.cacheName(), desc.cacheId());
                    // Last cache in the group gone: stop the whole group.
                    if (!grpDesc.hasCaches()) {
                        registeredCacheGrps.remove(grpDesc.groupId());
                        ctx.discovery().removeCacheGroup(grpDesc);
                        exchangeActions.addCacheGroupToStop(grpDesc, req.destroy());
                        assert exchangeActions.checkStopRequestConsistency(grpDesc.groupId());
                        // If all caches in group will be destroyed it is not necessary to destroy single cache
                        // because group will be stopped anyway.
                        if (req.destroy()) {
                            for (ExchangeActions.CacheActionData action : exchangeActions.cacheStopRequests()) {
                                if (action.descriptor().groupId() == grpDesc.groupId())
                                    action.request().destroy(false);
                            }
                        }
                    }
                }
            }
            else
                assert false : req;
            if (!needExchange) {
                // Only the initiating node completes the future locally.
                if (!clientCacheStart && ctx.localNodeId().equals(req.initiatingNodeId()))
                    reqsToComplete.add(new T2<>(req, waitTopVer));
            }
            else
                res.needExchange = true;
        }
        if (!F.isEmpty(res.addedDescs)) {
            // New descriptors become visible on the next minor version when an exchange is triggered.
            AffinityTopologyVersion startTopVer = res.needExchange ? topVer.nextMinorVersion() : topVer;
            for (DynamicCacheDescriptor desc : res.addedDescs) {
                assert desc.template() || res.needExchange;
                desc.startTopologyVersion(startTopVer);
            }
        }
        if (!F.isEmpty(reqsToComplete)) {
            // Complete no-exchange futures asynchronously, waiting for affinity
            // readiness at the computed topology version when required.
            ctx.closure().callLocalSafe(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    for (T2<DynamicCacheChangeRequest, AffinityTopologyVersion> t : reqsToComplete) {
                        final DynamicCacheChangeRequest req = t.get1();
                        AffinityTopologyVersion waitTopVer = t.get2();
                        IgniteInternalFuture<?> fut = waitTopVer != null ?
                            ctx.cache().context().exchange().affinityReadyFuture(waitTopVer) : null;
                        if (fut == null || fut.isDone())
                            ctx.cache().completeCacheStartFuture(req, false, null);
                        else {
                            fut.listen(new IgniteInClosure<IgniteInternalFuture<?>>() {
                                @Override public void apply(IgniteInternalFuture<?> fut) {
                                    ctx.cache().completeCacheStartFuture(req, false, null);
                                }
                            });
                        }
                    }
                    return null;
                }
            });
        }
        return res;
    }
/**
 * Adds this node's cache discovery payload to the given data bag before the node joins topology.
 * Daemon nodes do not host caches and therefore contribute nothing.
 *
 * @param dataBag Discovery data bag to populate.
 */
void collectJoiningNodeData(DiscoveryDataBag dataBag) {
    if (ctx.isDaemon())
        return;

    dataBag.addJoiningNodeData(CACHE_PROC.ordinal(), joinDiscoveryData());
}
/**
 * @return {@code True} if there are currently restarting caches.
 */
boolean hasRestartingCaches() {
// F.isEmpty is null-safe: both null and an empty collection mean "no restarting caches".
return !F.isEmpty(restartingCaches);
}
/**
 * @return Collection of names of currently restarting caches.
 *     NOTE(review): this returns the live internal collection, not a copy — callers must not mutate it.
 */
Collection<String> restartingCaches() {
return restartingCaches;
}
/**
 * Builds the discovery payload sent on local node join.
 *
 * <p>On a normal (first) join this is the statically configured cache data collected at startup.
 * On client reconnect it instead describes the cache groups and caches that were active locally
 * before the disconnect, so the cluster can match them against its current state.
 *
 * @return Discovery data sent on local node join.
 */
private Serializable joinDiscoveryData() {
    if (cachesOnDisconnect == null) {
        // Normal join path: send statically configured caches collected on startup.
        assert ctx.config().isDaemon() || joinDiscoData != null;

        return joinDiscoData;
    }

    // Reconnect path: describe the pre-disconnect cache groups and caches.
    Map<Integer, CacheGroupDescriptor> grpsBeforeDisconnect = cachesOnDisconnect.cacheGrps;
    Map<String, DynamicCacheDescriptor> cachesBeforeDisconnect = cachesOnDisconnect.caches;

    Map<Integer, CacheClientReconnectDiscoveryData.CacheGroupInfo> grpInfos = new HashMap<>();

    for (CacheGroupContext grp : ctx.cache().cacheGroups()) {
        CacheGroupDescriptor desc = grpsBeforeDisconnect.get(grp.groupId());

        assert desc != null : grp.cacheOrGroupName();

        grpInfos.put(grp.groupId(),
            new CacheClientReconnectDiscoveryData.CacheGroupInfo(desc.config(), desc.deploymentId(), 0));
    }

    Map<String, CacheClientReconnectDiscoveryData.CacheInfo> cacheInfos = new HashMap<>();

    for (IgniteInternalCache cache : ctx.cache().caches()) {
        DynamicCacheDescriptor desc = cachesBeforeDisconnect.get(cache.name());

        assert desc != null : cache.name();

        cacheInfos.put(cache.name(), new CacheClientReconnectDiscoveryData.CacheInfo(desc.cacheConfiguration(),
            desc.cacheType(),
            desc.deploymentId(),
            cache.context().isNear(),
            0));
    }

    return new CacheClientReconnectDiscoveryData(grpInfos, cacheInfos);
}
/**
 * Called from exchange worker.
 *
 * <p>Hands over the pre-computed list of caches to start on local join exactly once:
 * the field is reset to an empty list so repeated calls return nothing.
 *
 * @return Caches to be started when this node starts (never {@code null}).
 */
@NotNull public List<T2<DynamicCacheDescriptor, NearCacheConfiguration>> cachesToStartOnLocalJoin() {
    if (ctx.isDaemon())
        return Collections.emptyList();

    List<T2<DynamicCacheDescriptor, NearCacheConfiguration>> toStart = locJoinStartCaches;

    locJoinStartCaches = Collections.emptyList();

    return toStart;
}
/**
 * @param joinedNodeId Joined node ID.
 * @return {@code True} if at least one registered statically configured cache was received
 *     from the given joined node.
 */
boolean hasCachesReceivedFromJoin(UUID joinedNodeId) {
    for (DynamicCacheDescriptor cacheDesc : registeredCaches.values()) {
        if (!cacheDesc.staticallyConfigured())
            continue;

        assert cacheDesc.receivedFrom() != null : cacheDesc;

        if (joinedNodeId.equals(cacheDesc.receivedFrom()))
            return true;
    }

    return false;
}
/**
 * @param joinedNodeId Joined node ID.
 * @return New statically configured caches received from the joined node, in DIRECT
 *     (system-caches-first) order; empty list if none or if this is a daemon node.
 */
List<DynamicCacheDescriptor> cachesReceivedFromJoin(UUID joinedNodeId) {
    assert joinedNodeId != null;

    if (ctx.isDaemon())
        return Collections.<DynamicCacheDescriptor>emptyList();

    // Lazily allocated: most joins bring no new caches.
    List<DynamicCacheDescriptor> received = null;

    for (DynamicCacheDescriptor desc : orderedCaches(CacheComparators.DIRECT)) {
        if (!desc.staticallyConfigured())
            continue;

        assert desc.receivedFrom() != null : desc;

        if (!joinedNodeId.equals(desc.receivedFrom()))
            continue;

        if (received == null)
            received = new ArrayList<>();

        received.add(desc);
    }

    return received == null ? Collections.<DynamicCacheDescriptor>emptyList() : received;
}
/**
 * Discovery event callback, executed from discovery thread.
 *
 * <p>On node join, stamps the join topology version onto every descriptor that was received
 * from the joined node. If the joined node is the local node and no grid data was received,
 * this is the very first node in the cluster and caches are started from its own configuration.
 *
 * @param type Event type.
 * @param node Event node.
 * @param topVer Topology version.
 */
public void onDiscoveryEvent(int type, ClusterNode node, AffinityTopologyVersion topVer) {
    if (type != EVT_NODE_JOINED || ctx.isDaemon())
        return;

    UUID joinedId = node.id();

    for (CacheGroupDescriptor grpDesc : registeredCacheGrps.values()) {
        if (joinedId.equals(grpDesc.receivedFrom()))
            grpDesc.receivedFromStartVersion(topVer);
    }

    for (DynamicCacheDescriptor cacheDesc : registeredCaches.values()) {
        if (joinedId.equals(cacheDesc.receivedFrom()))
            cacheDesc.receivedFromStartVersion(topVer);
    }

    for (DynamicCacheDescriptor tmplDesc : registeredTemplates.values()) {
        if (joinedId.equals(tmplDesc.receivedFrom()))
            tmplDesc.receivedFromStartVersion(topVer);
    }

    if (joinedId.equals(ctx.discovery().localNode().id()) && gridData == null) {
        // First node starts: no grid data could have been received from anyone else.
        assert joinDiscoData != null;

        initStartCachesForLocalJoin(true);
    }
}
/**
 * Contributes common cache discovery data on behalf of the grid, unless another
 * node has already collected it for this component.
 *
 * @param dataBag Discovery data bag.
 */
public void collectGridNodeData(DiscoveryDataBag dataBag) {
    if (ctx.isDaemon() || dataBag.commonDataCollectedFor(CACHE_PROC.ordinal()))
        return;

    dataBag.addGridCommonData(CACHE_PROC.ordinal(), collectCommonDiscoveryData());
}
/**
 * Builds a snapshot of all registered cache groups, caches and templates to be sent
 * to a joining node as common discovery data.
 *
 * @return Information about started caches.
 */
private CacheNodeCommonDiscoveryData collectCommonDiscoveryData() {
// Serialize every registered cache group descriptor.
Map<Integer, CacheGroupData> cacheGrps = new HashMap<>();
for (CacheGroupDescriptor grpDesc : registeredCacheGrps.values()) {
CacheGroupData grpData = new CacheGroupData(grpDesc.config(),
grpDesc.groupName(),
grpDesc.groupId(),
grpDesc.receivedFrom(),
grpDesc.startTopologyVersion(),
grpDesc.deploymentId(),
grpDesc.caches(),
0);
cacheGrps.put(grpDesc.groupId(), grpData);
}
// Serialize every registered cache descriptor (flags: sql from descriptor, template=false).
Map<String, CacheData> caches = new HashMap<>();
for (DynamicCacheDescriptor desc : registeredCaches.values()) {
CacheData cacheData = new CacheData(desc.cacheConfiguration(),
desc.cacheId(),
desc.groupId(),
desc.cacheType(),
desc.deploymentId(),
desc.schema(),
desc.receivedFrom(),
desc.staticallyConfigured(),
desc.sql(),
false,
0);
caches.put(desc.cacheName(), cacheData);
}
// Serialize templates; they have no cache/group ID (0) and are flagged template=true, sql=false.
Map<String, CacheData> templates = new HashMap<>();
for (DynamicCacheDescriptor desc : registeredTemplates.values()) {
CacheData cacheData = new CacheData(desc.cacheConfiguration(),
0,
0,
desc.cacheType(),
desc.deploymentId(),
desc.schema(),
desc.receivedFrom(),
desc.staticallyConfigured(),
false,
true,
0);
templates.put(desc.cacheName(), cacheData);
}
// Copy to detach from the live restarting-caches collection.
Collection<String> restarting = new HashSet<>(restartingCaches);
return new CacheNodeCommonDiscoveryData(caches,
templates,
cacheGrps,
ctx.discovery().clientNodesMap(),
restarting);
}
/**
 * Applies common cache discovery data received from the cluster on local node join
 * (or reconnect): replaces locally registered groups/caches/templates with the cluster's
 * view, re-registers discovery cache filters, checks local configuration for conflicts
 * and finally initializes the list of caches to start locally.
 *
 * @param data Discovery data.
 */
public void onGridDataReceived(DiscoveryDataBag.GridDiscoveryData data) {
if (ctx.isDaemon() || data.commonData() == null)
return;
assert joinDiscoData != null || disconnectedState();
assert data.commonData() instanceof CacheNodeCommonDiscoveryData : data;
CacheNodeCommonDiscoveryData cachesData = (CacheNodeCommonDiscoveryData)data.commonData();
// CacheGroup configurations that were created from local node configuration.
Map<Integer, CacheGroupDescriptor> locCacheGrps = new HashMap<>(registeredCacheGroups());
// Replace locally registered data with actual data received from cluster.
registeredCaches.clear();
registeredCacheGrps.clear();
ctx.discovery().cleanCachesAndGroups();
// Register cache groups first: cache descriptors below look their group up by ID.
for (CacheGroupData grpData : cachesData.cacheGroups().values()) {
CacheGroupDescriptor grpDesc = new CacheGroupDescriptor(
grpData.config(),
grpData.groupName(),
grpData.groupId(),
grpData.receivedFrom(),
grpData.startTopologyVersion(),
grpData.deploymentId(),
grpData.caches());
// Keep locally-derived settings for groups that also exist in local configuration.
if (locCacheGrps.containsKey(grpDesc.groupId())) {
CacheGroupDescriptor locGrpCfg = locCacheGrps.get(grpDesc.groupId());
grpDesc.mergeWith(locGrpCfg);
}
CacheGroupDescriptor old = registeredCacheGrps.put(grpDesc.groupId(), grpDesc);
assert old == null : old;
ctx.discovery().addCacheGroup(grpDesc,
grpData.config().getNodeFilter(),
grpData.config().getCacheMode());
}
// Templates: no group descriptor, template flag set, never SQL.
for (CacheData cacheData : cachesData.templates().values()) {
DynamicCacheDescriptor desc = new DynamicCacheDescriptor(
ctx,
cacheData.cacheConfiguration(),
cacheData.cacheType(),
null,
true,
cacheData.receivedFrom(),
cacheData.staticallyConfigured(),
false,
cacheData.deploymentId(),
cacheData.schema());
registeredTemplates.put(cacheData.cacheConfiguration().getName(), desc);
}
// Caches: bind each to its (already registered) group and restore the discovery cache filter.
for (CacheData cacheData : cachesData.caches().values()) {
CacheGroupDescriptor grpDesc = registeredCacheGrps.get(cacheData.groupId());
assert grpDesc != null : cacheData.cacheConfiguration().getName();
CacheConfiguration<?, ?> cfg = cacheData.cacheConfiguration();
DynamicCacheDescriptor desc = new DynamicCacheDescriptor(
ctx,
cacheData.cacheConfiguration(),
cacheData.cacheType(),
grpDesc,
false,
cacheData.receivedFrom(),
cacheData.staticallyConfigured(),
cacheData.sql(),
cacheData.deploymentId(),
cacheData.schema());
desc.receivedOnDiscovery(true);
registeredCaches.put(cacheData.cacheConfiguration().getName(), desc);
ctx.discovery().setCacheFilter(
desc.cacheId(),
grpDesc.groupId(),
cfg.getName(),
cfg.getNearConfiguration() != null);
}
// Restore known client-node mappings per cache.
if (!F.isEmpty(cachesData.clientNodesMap())) {
for (Map.Entry<String, Map<UUID, Boolean>> entry : cachesData.clientNodesMap().entrySet()) {
String cacheName = entry.getKey();
for (Map.Entry<UUID, Boolean> tup : entry.getValue().entrySet())
ctx.discovery().addClientNode(cacheName, tup.getKey(), tup.getValue());
}
}
// Validate locally configured caches that are NOT already started in the cluster
// against the received state; remember the first conflict (reported later on join).
String conflictErr = null;
if (joinDiscoData != null) {
for (Map.Entry<String, CacheJoinNodeDiscoveryData.CacheInfo> e : joinDiscoData.caches().entrySet()) {
if (!registeredCaches.containsKey(e.getKey())) {
conflictErr = checkCacheConflict(e.getValue().cacheData().config());
if (conflictErr != null) {
conflictErr = "Failed to start configured cache due to conflict with started caches. " +
conflictErr;
break;
}
}
}
}
gridData = new GridData(joinDiscoData, cachesData, conflictErr);
// On reconnect the start list is computed in onReconnected() instead.
if (!disconnectedState())
initStartCachesForLocalJoin(false);
else
locJoinStartCaches = Collections.emptyList();
}
/**
 * Initialize collection with caches to be start:
 * {@code locJoinStartCaches} or {@code locCfgsForActivation} if cluster is inactive.
 *
 * <p>If the cluster state is in transition, defers the work to {@link #onStateChangeFinish}
 * via the {@code joinOnTransition} flag.
 *
 * @param firstNode {@code True} if first node in cluster starts.
 */
private void initStartCachesForLocalJoin(boolean firstNode) {
assert F.isEmpty(locJoinStartCaches) : locJoinStartCaches;
// Cluster state change in progress: retry after the transition completes.
if (ctx.state().clusterState().transition()) {
joinOnTransition = true;
return;
}
if (joinDiscoData != null) {
locJoinStartCaches = new ArrayList<>();
locCfgsForActivation = new HashMap<>();
boolean active = ctx.state().clusterState().active();
for (DynamicCacheDescriptor desc : orderedCaches(CacheComparators.DIRECT)) {
// On the very first node only locally configured caches are started.
if (firstNode && !joinDiscoData.caches().containsKey(desc.cacheName()))
continue;
CacheConfiguration<?, ?> cfg = desc.cacheConfiguration();
CacheJoinNodeDiscoveryData.CacheInfo locCfg = joinDiscoData.caches().get(cfg.getName());
NearCacheConfiguration nearCfg = null;
if (locCfg != null) {
nearCfg = locCfg.cacheData().config().getNearConfiguration();
// Prefer the local configuration: build a substitute descriptor that keeps the
// cluster-wide metadata (type, group, versions) but uses the local cache config.
DynamicCacheDescriptor desc0 = new DynamicCacheDescriptor(ctx,
locCfg.cacheData().config(),
desc.cacheType(),
desc.groupDescriptor(),
desc.template(),
desc.receivedFrom(),
desc.staticallyConfigured(),
desc.sql(),
desc.deploymentId(),
new QuerySchema(locCfg.cacheData().queryEntities()));
desc0.startTopologyVersion(desc.startTopologyVersion());
desc0.receivedFromStartVersion(desc.receivedFromStartVersion());
desc0.clientCacheStartVersion(desc.clientCacheStartVersion());
desc = desc0;
}
// Start the cache if it is configured locally, caches were requested to start on join,
// or this node is an affinity node for the cache's group.
if (locCfg != null ||
joinDiscoData.startCaches() ||
CU.affinityNode(ctx.discovery().localNode(), desc.groupDescriptor().config().getNodeFilter())) {
if (active)
locJoinStartCaches.add(new T2<>(desc, nearCfg));
else
// Inactive cluster: remember configs to apply once the cluster is activated.
locCfgsForActivation.put(desc.cacheName(), new T2<>(desc.cacheConfiguration(), nearCfg));
}
}
}
}
/**
 * Completes a join that was deferred because a cluster state transition was in progress.
 *
 * @param msg Message.
 */
public void onStateChangeFinish(ChangeGlobalStateFinishMessage msg) {
    if (!joinOnTransition)
        return;

    // Keep this order: the init call inspects cluster state and may re-arm the flag
    // if yet another transition started; resetting afterwards preserves the original behavior.
    initStartCachesForLocalJoin(false);

    joinOnTransition = false;
}
/**
 * Translates a cluster state change (activate/deactivate) into exchange actions:
 * on activation, start requests for every registered cache (plus any stored cache
 * configurations not yet registered); on deactivation, stop requests for everything.
 *
 * @param msg Message.
 * @param topVer Current topology version.
 * @return Exchange action.
 * @throws IgniteCheckedException If configuration validation failed.
 */
public ExchangeActions onStateChangeRequest(ChangeGlobalStateMessage msg, AffinityTopologyVersion topVer)
throws IgniteCheckedException {
ExchangeActions exchangeActions = new ExchangeActions();
if (msg.activate()) {
// Activation: create a start request per registered cache (system caches first).
for (DynamicCacheDescriptor desc : orderedCaches(CacheComparators.DIRECT)) {
desc.startTopologyVersion(topVer);
DynamicCacheChangeRequest req = new DynamicCacheChangeRequest(msg.requestId(),
desc.cacheName(),
msg.initiatorNodeId());
req.startCacheConfiguration(desc.cacheConfiguration());
req.cacheType(desc.cacheType());
// Override with a locally supplied configuration saved for activation, if any.
T2<CacheConfiguration, NearCacheConfiguration> locCfg = locCfgsForActivation.get(desc.cacheName());
if (locCfg != null) {
if (locCfg.get1() != null)
req.startCacheConfiguration(locCfg.get1());
req.nearCacheConfiguration(locCfg.get2());
req.locallyConfigured(true);
}
exchangeActions.addCacheToStart(req, desc);
}
for (CacheGroupDescriptor grpDesc : registeredCacheGroups().values())
exchangeActions.addCacheGroupToStart(grpDesc);
// Also start persisted cache configurations carried by the message but not yet registered.
List<StoredCacheData> storedCfgs = msg.storedCacheConfigurations();
if (storedCfgs != null) {
List<DynamicCacheChangeRequest> reqs = new ArrayList<>();
IgniteUuid deplymentId = IgniteUuid.fromUuid(msg.requestId());
for (StoredCacheData storedCfg : storedCfgs) {
CacheConfiguration ccfg = storedCfg.config();
if (!registeredCaches.containsKey(ccfg.getName())) {
DynamicCacheChangeRequest req = new DynamicCacheChangeRequest(msg.requestId(),
ccfg.getName(),
msg.initiatorNodeId());
req.deploymentId(deplymentId);
req.startCacheConfiguration(ccfg);
req.cacheType(ctx.cache().cacheType(ccfg.getName()));
req.schema(new QuerySchema(storedCfg.queryEntities()));
reqs.add(req);
}
}
CacheChangeProcessResult res = processCacheChangeRequests(exchangeActions, reqs, topVer, true);
// Any failure to start a stored cache aborts the whole activation.
if (!res.errs.isEmpty()) {
IgniteCheckedException err = new IgniteCheckedException("Failed to activate cluster.");
for (IgniteCheckedException err0 : res.errs)
err.addSuppressed(err0);
throw err;
}
}
}
else {
// Deactivation: stop all caches (user caches first, per REVERSE order) and all groups.
locCfgsForActivation = new HashMap<>();
for (DynamicCacheDescriptor desc : orderedCaches(CacheComparators.REVERSE)) {
DynamicCacheChangeRequest req = DynamicCacheChangeRequest.stopRequest(ctx,
desc.cacheName(),
desc.sql(),
false);
exchangeActions.addCacheToStop(req, desc);
// Remember client caches so they can be restarted on the next activation.
if (ctx.discovery().cacheClientNode(ctx.discovery().localNode(), desc.cacheName()))
locCfgsForActivation.put(desc.cacheName(), new T2<>((CacheConfiguration)null, (NearCacheConfiguration)null));
}
for (CacheGroupDescriptor grpDesc : registeredCacheGroups().values())
exchangeActions.addCacheGroupToStop(grpDesc, false);
}
return exchangeActions;
}
/**
 * Handles discovery data received from a joining node: either client reconnect data
 * (processed immediately, or queued while the local node itself is disconnected) or
 * regular join data.
 *
 * @param data Joining node data.
 */
public void onJoiningNodeDataReceived(DiscoveryDataBag.JoiningNodeDiscoveryData data) {
    if (!data.hasJoiningNodeData())
        return;

    Serializable joiningNodeData = data.joiningNodeData();

    if (joiningNodeData instanceof CacheClientReconnectDiscoveryData) {
        CacheClientReconnectDiscoveryData reconnectData = (CacheClientReconnectDiscoveryData)joiningNodeData;

        if (!disconnectedState())
            processClientReconnectData(reconnectData, data.joiningNodeId());
        else {
            // Local node is itself disconnected: queue the request until reconnect completes.
            if (clientReconnectReqs == null)
                clientReconnectReqs = new LinkedHashMap<>();

            clientReconnectReqs.put(data.joiningNodeId(), reconnectData);
        }

        return;
    }

    if (joiningNodeData instanceof CacheJoinNodeDiscoveryData)
        processJoiningNode((CacheJoinNodeDiscoveryData)joiningNodeData, data.joiningNodeId(), false);
}
/**
 * Re-registers a reconnecting client's caches in the discovery client-node map.
 * No-op unless the cluster is active and not mid-transition.
 *
 * @param clientData Discovery data.
 * @param clientNodeId Client node ID.
 */
private void processClientReconnectData(CacheClientReconnectDiscoveryData clientData, UUID clientNodeId) {
    DiscoveryDataClusterState state = ctx.state().clusterState();

    if (!state.active() || state.transition())
        return;

    for (CacheClientReconnectDiscoveryData.CacheInfo cacheInfo : clientData.clientCaches().values()) {
        String cacheName = cacheInfo.config().getName();

        if (surviveReconnect(cacheName)) {
            // Utility caches always survive reconnect; near flag is irrelevant for them.
            ctx.discovery().addClientNode(cacheName, clientNodeId, false);

            continue;
        }

        DynamicCacheDescriptor desc = registeredCaches.get(cacheName);

        // Only re-register if the cache still exists and is the same incarnation
        // (deployment ID unchanged since the client last saw it).
        boolean sameIncarnation = desc != null && desc.deploymentId().equals(cacheInfo.deploymentId());

        if (sameIncarnation)
            ctx.discovery().addClientNode(cacheName, clientNodeId, cacheInfo.nearCache());
    }
}
/**
 * Checks cache configuration on conflict with already registered caches and cache groups.
 *
 * <p>Checked conflicts, in order: cache name vs existing group name; group name vs existing
 * cache name; cache ID collision; group ID collision (group IDs are derived from names, so
 * different names can hash to the same ID).
 *
 * @param cfg Cache configuration.
 * @return {@code null} if validation passed, error message in other case.
 */
private String checkCacheConflict(CacheConfiguration<?, ?> cfg) {
int cacheId = CU.cacheId(cfg.getName());
// A cache may not share its name with an existing cache group.
if (cacheGroupByName(cfg.getName()) != null)
return "Cache name conflict with existing cache group (change cache name) [cacheName=" + cfg.getName() + ']';
// Conversely, the requested group name may not collide with an existing cache name.
if (cfg.getGroupName() != null) {
DynamicCacheDescriptor desc = registeredCaches.get(cfg.getGroupName());
if (desc != null)
return "Cache group name conflict with existing cache (change group name) [cacheName=" + cfg.getName() +
", conflictingCacheName=" + desc.cacheName() + ']';
}
// Cache IDs are name hashes, so two distinct names can produce the same ID.
for (DynamicCacheDescriptor desc : registeredCaches.values()) {
if (desc.cacheId() == cacheId)
return "Cache ID conflict (change cache name) [cacheName=" + cfg.getName() +
", conflictingCacheName=" + desc.cacheName() + ']';
}
int grpId = cacheGroupId(cfg.getName(), cfg.getGroupName());
if (cfg.getGroupName() != null) {
// Group not found by name but its derived ID is taken: ID hash collision with another group.
if (cacheGroupByName(cfg.getGroupName()) == null) {
CacheGroupDescriptor desc = registeredCacheGrps.get(grpId);
if (desc != null)
return "Cache group ID conflict (change cache group name) [cacheName=" + cfg.getName() +
", groupName=" + cfg.getGroupName() +
(desc.sharedGroup() ? ", conflictingGroupName=" : ", conflictingCacheName=") + desc.cacheOrGroupName() + ']';
}
}
else {
// No explicit group: the implicit per-cache group ID must be free as well.
CacheGroupDescriptor desc = registeredCacheGrps.get(grpId);
if (desc != null)
return "Cache group ID conflict (change cache name) [cacheName=" + cfg.getName() +
(desc.sharedGroup() ? ", conflictingGroupName=" : ", conflictingCacheName=") + desc.cacheOrGroupName() + ']';
}
return null;
}
/**
 * Registers templates and caches brought by a joining node. Conflicting caches are
 * either rejected (local join) or skipped with a warning (remote join).
 *
 * @param joinData Joined node discovery data.
 * @param nodeId Joined node ID.
 * @param locJoin {@code True} if called on local node join.
 * @return Configuration conflict error.
 */
private String processJoiningNode(CacheJoinNodeDiscoveryData joinData, UUID nodeId, boolean locJoin) {
// Register templates first; duplicates by name are silently kept as-is.
for (CacheJoinNodeDiscoveryData.CacheInfo cacheInfo : joinData.templates().values()) {
CacheConfiguration<?, ?> cfg = cacheInfo.cacheData().config();
if (!registeredTemplates.containsKey(cfg.getName())) {
DynamicCacheDescriptor desc = new DynamicCacheDescriptor(ctx,
cfg,
cacheInfo.cacheType(),
null,
true,
nodeId,
true,
false,
joinData.cacheDeploymentId(),
new QuerySchema(cacheInfo.cacheData().queryEntities()));
DynamicCacheDescriptor old = registeredTemplates.put(cfg.getName(), desc);
assert old == null : old;
}
}
for (CacheJoinNodeDiscoveryData.CacheInfo cacheInfo : joinData.caches().values()) {
CacheConfiguration<?, ?> cfg = cacheInfo.cacheData().config();
if (!registeredCaches.containsKey(cfg.getName())) {
String conflictErr = checkCacheConflict(cfg);
if (conflictErr != null) {
// Local join fails hard; a remote joiner's conflicting cache is merely ignored.
if (locJoin)
return conflictErr;
U.warn(log, "Ignore cache received from joining node. " + conflictErr);
continue;
}
int cacheId = CU.cacheId(cfg.getName());
// Register (or join) the cache group before creating the cache descriptor.
CacheGroupDescriptor grpDesc = registerCacheGroup(null,
null,
cfg,
cacheId,
nodeId,
joinData.cacheDeploymentId());
ctx.discovery().setCacheFilter(
cacheId,
grpDesc.groupId(),
cfg.getName(),
cfg.getNearConfiguration() != null);
DynamicCacheDescriptor desc = new DynamicCacheDescriptor(ctx,
cfg,
cacheInfo.cacheType(),
grpDesc,
false,
nodeId,
true,
cacheInfo.sql(),
joinData.cacheDeploymentId(),
new QuerySchema(cacheInfo.cacheData().queryEntities()));
DynamicCacheDescriptor old = registeredCaches.put(cfg.getName(), desc);
assert old == null : old;
}
// Track the joining node as a client of every cache it declared (even pre-existing ones).
ctx.discovery().addClientNode(cfg.getName(), nodeId, cfg.getNearConfiguration() != null);
}
// Node asked to start all caches: register it as client for every registered cache.
if (joinData.startCaches()) {
for (DynamicCacheDescriptor desc : registeredCaches.values()) {
ctx.discovery().addClientNode(desc.cacheName(),
nodeId,
desc.cacheConfiguration().getNearConfiguration() != null);
}
}
return null;
}
/**
 * Linear lookup of a registered cache group by its (explicit) group name.
 *
 * @param grpName Group name.
 * @return Group descriptor if group found, {@code null} otherwise.
 */
@Nullable private CacheGroupDescriptor cacheGroupByName(String grpName) {
    assert grpName != null;

    for (CacheGroupDescriptor candidate : registeredCacheGrps.values()) {
        if (!grpName.equals(candidate.groupName()))
            continue;

        return candidate;
    }

    return null;
}
/**
 * Finds the non-shared (single-cache) group that contains the given cache.
 *
 * @param cacheName Cache name.
 * @return Group descriptor, or {@code null} if no matching non-shared group is registered.
 */
@Nullable private CacheGroupDescriptor nonSharedCacheGroupByCacheName(String cacheName) {
    assert cacheName != null;

    for (CacheGroupDescriptor candidate : registeredCacheGrps.values()) {
        if (candidate.sharedGroup())
            continue;

        if (candidate.caches().containsKey(cacheName))
            return candidate;
    }

    return null;
}
/**
 * Registers the cache group for a starting cache: joins an existing named group if one is
 * already registered, otherwise creates and registers a new group descriptor.
 *
 * @param exchActions Optional exchange actions to update if new group was added.
 * @param curTopVer Current topology version if dynamic cache started.
 * @param startedCacheCfg Cache configuration.
 * @param cacheId Cache ID.
 * @param rcvdFrom Node ID cache was recived from.
 * @param deploymentId Deployment ID.
 * @return Group descriptor.
 */
private CacheGroupDescriptor registerCacheGroup(
@Nullable ExchangeActions exchActions,
@Nullable AffinityTopologyVersion curTopVer,
CacheConfiguration<?, ?> startedCacheCfg,
Integer cacheId,
UUID rcvdFrom,
IgniteUuid deploymentId) {
// Shared group already registered: just add the cache to it.
if (startedCacheCfg.getGroupName() != null) {
CacheGroupDescriptor desc = cacheGroupByName(startedCacheCfg.getGroupName());
if (desc != null) {
desc.onCacheAdded(startedCacheCfg.getName(), cacheId);
return desc;
}
}
int grpId = cacheGroupId(startedCacheCfg.getName(), startedCacheCfg.getGroupName());
Map<String, Integer> caches = Collections.singletonMap(startedCacheCfg.getName(), cacheId);
CacheGroupDescriptor grpDesc = new CacheGroupDescriptor(
startedCacheCfg,
startedCacheCfg.getGroupName(),
grpId,
rcvdFrom,
// Statically configured caches (no topology version yet) leave this null.
curTopVer != null ? curTopVer.nextMinorVersion() : null,
deploymentId,
caches);
CacheGroupDescriptor old = registeredCacheGrps.put(grpId, grpDesc);
assert old == null : old;
ctx.discovery().addCacheGroup(grpDesc, grpDesc.config().getNodeFilter(), startedCacheCfg.getCacheMode());
if (exchActions != null)
exchActions.addCacheGroupToStart(grpDesc);
return grpDesc;
}
/**
 * Validates a starting cache's configuration against the group it is joining, if any.
 *
 * @param ccfg Cache configuration to start.
 * @throws IgniteCheckedException If failed.
 */
public void validateStartCacheConfiguration(CacheConfiguration ccfg) throws IgniteCheckedException {
    String grpName = ccfg.getGroupName();

    if (grpName == null)
        return;

    CacheGroupDescriptor grpDesc = cacheGroupByName(grpName);

    // No existing group with this name — nothing to validate against yet.
    if (grpDesc == null)
        return;

    assert grpName.equals(grpDesc.groupName());

    validateCacheGroupConfiguration(grpDesc.config(), ccfg);
}
/**
 * Checks that a cache joining an existing group agrees with the group's configuration on
 * every attribute that must be uniform across the group. The trailing boolean of each
 * check selects fail ({@code true}) vs warn ({@code false}) on mismatch.
 *
 * @param cfg Existing configuration.
 * @param startCfg Cache configuration to start.
 * @throws IgniteCheckedException If validation failed.
 */
private void validateCacheGroupConfiguration(CacheConfiguration cfg, CacheConfiguration startCfg)
throws IgniteCheckedException {
GridCacheAttributes attr1 = new GridCacheAttributes(cfg);
GridCacheAttributes attr2 = new GridCacheAttributes(startCfg);
// Hard requirements: mismatch fails the start.
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "cacheMode", "Cache mode",
cfg.getCacheMode(), startCfg.getCacheMode(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "affinity", "Affinity function",
attr1.cacheAffinityClassName(), attr2.cacheAffinityClassName(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "affinityPartitionsCount",
"Affinity partitions count", attr1.affinityPartitionsCount(), attr2.affinityPartitionsCount(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "nodeFilter", "Node filter",
attr1.nodeFilterClassName(), attr2.nodeFilterClassName(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "memoryPolicyName", "Memory policy",
cfg.getMemoryPolicyName(), startCfg.getMemoryPolicyName(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "topologyValidator", "Topology validator",
attr1.topologyValidatorClassName(), attr2.topologyValidatorClassName(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "partitionLossPolicy", "Partition Loss Policy",
cfg.getPartitionLossPolicy(), startCfg.getPartitionLossPolicy(), true);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "rebalanceMode", "Rebalance mode",
cfg.getRebalanceMode(), startCfg.getRebalanceMode(), true);
// Soft requirements: mismatch only logs a warning.
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "rebalanceDelay", "Rebalance delay",
cfg.getRebalanceDelay(), startCfg.getRebalanceDelay(), false);
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "rebalanceOrder", "Rebalance order",
cfg.getRebalanceOrder(), startCfg.getRebalanceOrder(), false);
// Backups only make sense for PARTITIONED caches.
if (cfg.getCacheMode() == PARTITIONED) {
CU.validateCacheGroupsAttributesMismatch(log, cfg, startCfg, "backups", "Backups",
cfg.getBackups(), startCfg.getBackups(), true);
}
}
/**
 * @return Registered caches, keyed by cache name (live map, not a copy).
 */
ConcurrentMap<String, DynamicCacheDescriptor> registeredCaches() {
return registeredCaches;
}
/**
 * @return Registered cache templates, keyed by template name (live map, not a copy).
 */
ConcurrentMap<String, DynamicCacheDescriptor> registeredTemplates() {
return registeredTemplates;
}
/**
 * @return Registered cache groups, keyed by group ID (live map, not a copy).
 */
ConcurrentMap<Integer, CacheGroupDescriptor> registeredCacheGroups() {
return registeredCacheGrps;
}
/**
 * Returns registered cache descriptors ordered by {@code comparator}.
 *
 * @param comparator Comparator (DIRECT, REVERSE or custom) to order cache descriptors.
 * @return Ordered by comparator cache descriptors.
 */
private Collection<DynamicCacheDescriptor> orderedCaches(Comparator<DynamicCacheDescriptor> comparator) {
    // Copy-constructor presizes the list and snapshots the concurrent map's values in one pass
    // (the original allocated an empty list and then called addAll).
    List<DynamicCacheDescriptor> ordered = new ArrayList<>(registeredCaches.values());

    Collections.sort(ordered, comparator);

    return ordered;
}
/**
 * Called when the local (client) node disconnects from the cluster: snapshots the current
 * cluster state plus registered groups/caches for later reconciliation in
 * {@code onReconnected}, then clears all local registrations.
 */
public void onDisconnected() {
    Map<Integer, CacheGroupDescriptor> grpsSnapshot = new HashMap<>(registeredCacheGrps);
    Map<String, DynamicCacheDescriptor> cachesSnapshot = new HashMap<>(registeredCaches);

    cachesOnDisconnect = new CachesOnDisconnect(ctx.state().clusterState(), grpsSnapshot, cachesSnapshot);

    registeredCacheGrps.clear();
    registeredCaches.clear();
    registeredTemplates.clear();

    clientReconnectReqs = null;
}
/**
 * Reconciles the pre-disconnect snapshot taken in {@code onDisconnected} with the cluster
 * state observed after reconnect, computing which caches and cache groups were stopped
 * while this client was away.
 *
 * @param active {@code True} if reconnected to active cluster.
 * @param transition {@code True} if reconnected while state transition in progress.
 * @return Information about stopped caches and cache groups.
 */
public ClusterCachesReconnectResult onReconnected(boolean active, boolean transition) {
assert disconnectedState();
Set<String> stoppedCaches = new HashSet<>();
Set<Integer> stoppedCacheGrps = new HashSet<>();
if (!active) {
// Inactive cluster: everything we had is considered stopped; remember local near
// configs so the caches can be restarted on activation.
joinOnTransition = transition;
if (F.isEmpty(locCfgsForActivation)) {
locCfgsForActivation = new HashMap<>();
for (IgniteInternalCache cache : ctx.cache().caches()) {
locCfgsForActivation.put(cache.name(),
new T2<>((CacheConfiguration)null, cache.configuration().getNearConfiguration()));
}
}
for (Map.Entry<Integer, CacheGroupDescriptor> e : cachesOnDisconnect.cacheGrps.entrySet())
stoppedCacheGrps.add(e.getValue().groupId());
for (Map.Entry<String, DynamicCacheDescriptor> e : cachesOnDisconnect.caches.entrySet())
stoppedCaches.add(e.getKey());
}
else {
// Active cluster: a group survived only if the cluster still has a group with the
// same identity (same deployment ID, or a surviving utility cache for non-shared groups).
for (Map.Entry<Integer, CacheGroupDescriptor> e : cachesOnDisconnect.cacheGrps.entrySet()) {
CacheGroupDescriptor locDesc = e.getValue();
CacheGroupDescriptor desc;
boolean stopped = true;
if (locDesc.sharedGroup()) {
desc = cacheGroupByName(locDesc.groupName());
if (desc != null && desc.deploymentId().equals(locDesc.deploymentId()))
stopped = false;
}
else {
desc = nonSharedCacheGroupByCacheName(locDesc.config().getName());
if (desc != null &&
(surviveReconnect(locDesc.config().getName()) || desc.deploymentId().equals(locDesc.deploymentId())))
stopped = false;
}
if (stopped)
stoppedCacheGrps.add(locDesc.groupId());
else
assert locDesc.groupId() == desc.groupId();
}
// A cache survived if it is a utility cache, or still registered with the same deployment ID.
for (Map.Entry<String, DynamicCacheDescriptor> e : cachesOnDisconnect.caches.entrySet()) {
DynamicCacheDescriptor desc = e.getValue();
String cacheName = e.getKey();
boolean stopped;
if (!surviveReconnect(cacheName)) {
DynamicCacheDescriptor newDesc = registeredCaches.get(cacheName);
stopped = newDesc == null || !desc.deploymentId().equals(newDesc.deploymentId());
}
else
stopped = false;
if (stopped)
stoppedCaches.add(cacheName);
}
if (!cachesOnDisconnect.clusterActive())
initStartCachesForLocalJoin(false);
}
// Replay reconnect requests from other clients that arrived while we were disconnected.
if (clientReconnectReqs != null) {
for (Map.Entry<UUID, CacheClientReconnectDiscoveryData> e : clientReconnectReqs.entrySet())
processClientReconnectData(e.getValue(), e.getKey());
clientReconnectReqs = null;
}
cachesOnDisconnect = null;
return new ClusterCachesReconnectResult(stoppedCacheGrps, stoppedCaches);
}
/**
 * @return {@code True} if client node is currently in disconnected state
 *     (a pre-disconnect snapshot exists and has not yet been reconciled).
 */
private boolean disconnectedState() {
return cachesOnDisconnect != null;
}
/**
 * @param cacheName Cache name.
 * @return {@code True} if cache with given name if system cache which should always survive client node disconnect.
 */
private boolean surviveReconnect(String cacheName) {
// Currently only the utility system cache survives reconnect unconditionally.
return CU.isUtilityCache(cacheName);
}
/**
 * Holds direct comparator (first system caches) and reverse comparator (first user caches).
 * Use DIRECT comparator for ordering cache start operations.
 * Use REVERSE comparator for ordering cache stop operations.
 */
private static class CacheComparators {
    /**
     * DIRECT comparator for cache descriptors (first system caches).
     *
     * <p>Fix: the previous implementation returned {@code -1} whenever {@code o1} was a system
     * cache, so for two system caches it was not symmetric ({@code compare(a, b)} and
     * {@code compare(b, a)} both returned {@code -1}), violating the {@link Comparator}
     * contract and risking "Comparison method violates its general contract!" from TimSort.
     * System caches still sort before user caches; ties within each class now fall back to
     * cache ID order, giving a proper total order.
     */
    static final Comparator<DynamicCacheDescriptor> DIRECT = new Comparator<DynamicCacheDescriptor>() {
        @Override public int compare(DynamicCacheDescriptor o1, DynamicCacheDescriptor o2) {
            boolean sys1 = !o1.cacheType().userCache();
            boolean sys2 = !o2.cacheType().userCache();

            if (sys1 != sys2)
                return sys1 ? -1 : 1; // System caches first.

            return o1.cacheId().compareTo(o2.cacheId());
        }
    };

    /**
     * REVERSE comparator for cache descriptors (first user caches).
     */
    static final Comparator<DynamicCacheDescriptor> REVERSE = new Comparator<DynamicCacheDescriptor>() {
        @Override public int compare(DynamicCacheDescriptor o1, DynamicCacheDescriptor o2) {
            return -DIRECT.compare(o1, o2);
        }
    };
}
/**
 * Immutable holder pairing the local join discovery data with the common cache data
 * received from the grid, plus any configuration conflict detected between the two.
 */
private static class GridData {
/** Discovery data collected for local node join (may be {@code null} on reconnect). */
private final CacheJoinNodeDiscoveryData joinDiscoData;
/** Common cache discovery data received from the cluster. */
private final CacheNodeCommonDiscoveryData gridData;
/** Cache configuration conflict error message, or {@code null} if none. */
private final String conflictErr;
/**
 * @param joinDiscoData Discovery data collected for local node join.
 * @param gridData Grid data.
 * @param conflictErr Cache configuration conflict error.
 */
GridData(CacheJoinNodeDiscoveryData joinDiscoData, CacheNodeCommonDiscoveryData gridData, String conflictErr) {
this.joinDiscoData = joinDiscoData;
this.gridData = gridData;
this.conflictErr = conflictErr;
}
}
/**
 * Snapshot of cluster state, cache groups and caches taken at the moment a client node
 * disconnected; used to compute stopped caches/groups on reconnect.
 */
private static class CachesOnDisconnect {
/** Cluster state at disconnect time. */
final DiscoveryDataClusterState state;
/** Registered cache groups at disconnect time, keyed by group ID. */
final Map<Integer, CacheGroupDescriptor> cacheGrps;
/** Registered caches at disconnect time, keyed by cache name. */
final Map<String, DynamicCacheDescriptor> caches;
/**
 * @param state Cluster state.
 * @param cacheGrps Cache groups.
 * @param caches Caches.
 */
CachesOnDisconnect(DiscoveryDataClusterState state,
Map<Integer, CacheGroupDescriptor> cacheGrps,
Map<String, DynamicCacheDescriptor> caches) {
this.state = state;
this.cacheGrps = cacheGrps;
this.caches = caches;
}
/**
 * @return {@code True} if cluster was in active state (and not mid-transition) at disconnect.
 */
boolean clusterActive() {
return state.active() && !state.transition();
}
}
/**
 * Mutable accumulator for the outcome of processing a batch of cache change requests.
 */
private static class CacheChangeProcessResult {
/** Whether any processed request requires a partition map exchange. */
private boolean needExchange;
/** Descriptors of caches/templates added while processing the batch. */
private final List<DynamicCacheDescriptor> addedDescs = new ArrayList<>();
/** Per-request validation/processing errors. */
private final List<IgniteCheckedException> errs = new ArrayList<>();
}
}
|
|
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.control.service.loadmgmt;
import org.apache.thrift.TMultiplexedProcessor;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.server.TThreadPoolServer.Args;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TServerTransport;
import org.apache.thrift.transport.TTransportException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.kaaproject.kaa.server.common.thrift.KaaThriftService;
import org.kaaproject.kaa.server.common.thrift.gen.bootstrap.BootstrapThriftService;
import org.kaaproject.kaa.server.common.zk.control.ControlNode;
import org.kaaproject.kaa.server.common.zk.gen.BootstrapNodeInfo;
import org.kaaproject.kaa.server.common.zk.gen.ConnectionInfo;
import org.kaaproject.kaa.server.common.zk.gen.LoadInfo;
import org.kaaproject.kaa.server.common.zk.gen.OperationsNodeInfo;
import org.kaaproject.kaa.server.common.zk.gen.TransportMetaData;
import org.kaaproject.kaa.server.control.service.loadmgmt.dynamicmgmt.EndpointCountRebalancer;
import org.kaaproject.kaa.server.control.service.zk.ControlZkService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* DynamicLoadManager Integration test, emulates new Bootstrap node adding.
*
* @author Andrey Panasenko
*
*/
public class TestDynamicLoadManagerIT {
private static final int DEFAULT_PRIORITY = 10;
/** The Constant LOG. */
private static final Logger LOG = LoggerFactory.getLogger(TestDynamicLoadManagerIT.class);
/** Thrift host for Bootstrap test service */
private static final String thriftHost = "localhost";
/** Thrift port for Bootstrap test service */
private static final int thriftPort = 9819;
/** Thread executor */
private static ExecutorService executor = null;
/** Bootstrap thrift test service runner */
private ThriftRunner bootstrapThrift;
private static LoadDistributionService ldServiceMock;
private static ControlZkService zkServiceMock;
private static ControlNode pNodeMock;
/**
* ThriftRunner Class. Used to run thrift servers.
*/
public class ThriftRunner implements Runnable {
private final String thriftHost;
private final int thriftPort;
private final BootstrapThriftServiceImpl bootstrapThriftService;
private boolean stopComplete = false;
private boolean startComplete = false;
private final Object stopSync;
private final Object startSync;
/** The server. */
private TServer server;
public ThriftRunner(String thriftHost, int thriftPort) {
this.thriftHost = thriftHost;
this.thriftPort = thriftPort;
this.stopSync = new Object();
this.startSync = new Object();
bootstrapThriftService = new BootstrapThriftServiceImpl();
}
/*
* (non-Javadoc)
*
* @see java.lang.Runnable#run()
*/
@Override
public void run() {
LOG.info("Initializing Thrift Service for Bootstrap Server....");
LOG.info("thrift host: {}", thriftHost);
LOG.info("thrift port: {}", thriftPort);
try {
TMultiplexedProcessor processor = new TMultiplexedProcessor();
BootstrapThriftService.Processor<BootstrapThriftService.Iface> bootstrapProcessor = new BootstrapThriftService.Processor<BootstrapThriftService.Iface>(
bootstrapThriftService);
processor.registerProcessor(KaaThriftService.BOOTSTRAP_SERVICE.getServiceName(), bootstrapProcessor);
TServerTransport serverTransport = new TServerSocket(new InetSocketAddress(thriftHost, thriftPort));
server = new TThreadPoolServer(new Args(serverTransport).processor(processor));
LOG.info("Bootstrap test Server {}:{} Started.", thriftHost, thriftPort);
synchronized (startSync) {
startComplete = true;
startSync.notify();
}
server.serve();
LOG.info("Bootstrap test Server {}:{} Stopped.", thriftHost, thriftPort);
} catch (TTransportException e) {
LOG.error("TTransportException", e);
} finally {
synchronized (stopSync) {
stopComplete = true;
bootstrapThriftService.reset();
stopSync.notify();
}
}
}
public void waitStart() {
LOG.info("Bootstrap test Server {}:{} waitStart()", thriftHost, thriftPort);
synchronized (startSync) {
if (!startComplete) {
try {
startSync.wait(60000);
} catch (InterruptedException e) {
LOG.error("Interupted ThiftRunner startWait()", e);
}
}
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
LOG.error("Interupted ThiftRunner startWait() in sleep", e);
}
}
public void shutdown() {
LOG.info("Bootstrap test Server {}:{} shutdown()", thriftHost, thriftPort);
server.stop();
synchronized (stopSync) {
if (!stopComplete) {
try {
stopSync.wait(60000);
} catch (InterruptedException e) {
LOG.error("Interupted ThiftRunner shutdown", e);
}
}
}
}
public BootstrapThriftServiceImpl getBootstrapThriftServiceImpl() {
return bootstrapThriftService;
}
}
/**
* Initialize mock objects and necessary test services
*
* @throws Exception
*/
@BeforeClass
public static void init() throws Exception {
executor = Executors.newCachedThreadPool();
ldServiceMock = mock(LoadDistributionService.class);
zkServiceMock = mock(ControlZkService.class);
pNodeMock = mock(ControlNode.class);
when(ldServiceMock.getOpsServerHistoryTTL()).thenReturn(300);
when(ldServiceMock.getRebalancer()).thenReturn(new EndpointCountRebalancer());
when(ldServiceMock.getZkService()).thenReturn(zkServiceMock);
when(zkServiceMock.getControlZKNode()).thenReturn(pNodeMock);
}
/**
* Stops services.
*
* @throws Exception
*/
@AfterClass
public static void after() throws Exception {
executor.shutdown();
executor.awaitTermination(1, TimeUnit.MINUTES);
}
/**
* Start Bootstrap thrift service
*
* @throws Exception
*/
@Before
public void beforeTest() throws Exception {
bootstrapThrift = new ThriftRunner(thriftHost, thriftPort);
executor.execute(bootstrapThrift);
bootstrapThrift.waitStart();
}
/**
* Stop bootstrap hrift service
*
* @throws Exception
*/
@After
public void afterTest() throws Exception {
bootstrapThrift.shutdown();
}
/**
* Test Bootstrap node add.
*/
@Test
public void bootstrapNodeAddTest() {
LOG.info("bootstrapNodeAddTest started");
DynamicLoadManager dm = getDynamicLoadManager();
ConnectionInfo bsConnectionInfo = new ConnectionInfo(thriftHost, thriftPort, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsNode = getBootstrapNodeInfo(bsConnectionInfo);
dm.onNodeAdded(bsNode);
checkBSNode();
}
/**
* Test Bootstrap Node update
*/
@Test
public void bootstrapNodeUpdateTest() {
LOG.info("BootstrapNodeUpdateTest started");
DynamicLoadManager dm = getDynamicLoadManager();
ConnectionInfo bsErrConnectionInfo = new ConnectionInfo(thriftHost, thriftPort + 1, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsErrNode = getBootstrapNodeInfo(bsErrConnectionInfo);
dm.onNodeAdded(bsErrNode);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
fail(e.toString());
}
ConnectionInfo bsConnectionInfo = new ConnectionInfo(thriftHost, thriftPort, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsNode = getBootstrapNodeInfo(bsConnectionInfo);
dm.onNodeUpdated(bsNode);
dm.recalculate();
checkBSNode();
}
/**
* Test Bootstrap Node remove
*/
@Test
public void bootstrapNodeDeleteTest() {
LOG.info("BootstrapNodeUpdateTest started");
DynamicLoadManager dm = getDynamicLoadManager();
ConnectionInfo bsErrConnectionInfo = new ConnectionInfo(thriftHost, thriftPort + 1, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsErrNode = getBootstrapNodeInfo(bsErrConnectionInfo);
dm.onNodeAdded(bsErrNode);
ConnectionInfo bsConnectionInfo = new ConnectionInfo(thriftHost, thriftPort, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsNode = getBootstrapNodeInfo(bsConnectionInfo);
dm.onNodeAdded(bsNode);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
fail(e.toString());
}
dm.onNodeRemoved(bsErrNode);
dm.recalculate();
checkBSNode();
}
/**
* Test Operations Node Update Update with two phases, one with same
* ConnectionInfo DNS Name, second with changed ConnectionInfo DNS Name
*/
@Test
public void operationsNodeUpdateTest() {
LOG.info("BootstrapNodeUpdateTest started");
DynamicLoadManager dm = getDynamicLoadManager();
OperationsNodeInfo nodeInfo = generateOperationsNodeInfo("localhost", 1200, 9898, ByteBuffer.wrap("Just array modified".getBytes()), 1);
dm.onNodeUpdated(nodeInfo);
LOG.info("BootstrapNodeTest Operations Node {} updated.", nodeInfo.toString());
OperationsNodeInfo nodeInfo2 = generateOperationsNodeInfo("localhost", 1201, 9899,
ByteBuffer.wrap("Just array modified".getBytes()), 1);
dm.onNodeUpdated(nodeInfo2);
LOG.info("BootstrapNodeTest Operations Node {} updated.", nodeInfo.toString());
ConnectionInfo bsConnectionInfo = new ConnectionInfo(thriftHost, thriftPort, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsNode = getBootstrapNodeInfo(bsConnectionInfo);
dm.onNodeAdded(bsNode);
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl());
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap());
assertEquals(2, bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().size());
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().get("localhost:1200"));
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().get("localhost:1201"));
assertEquals((long) 10, (long) bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().get("localhost:1200")
.getPriority());
assertEquals((long) 10, (long) bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().get("localhost:1201")
.getPriority());
}
/**
* Test Operations Node Remove
*/
@Test
public void operationsNodeRemoveTest() {
LOG.info("BootstrapNodeRemoveTest started");
bootstrapThrift.getBootstrapThriftServiceImpl().reset();
DynamicLoadManager dm = getDynamicLoadManager();
ConnectionInfo bsConnectionInfo = new ConnectionInfo(thriftHost, thriftPort, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsNode = getBootstrapNodeInfo(bsConnectionInfo);
dm.onNodeAdded(bsNode);
checkBSNode();
OperationsNodeInfo nodeInfo = generateOperationsNodeInfo("localhost", 1201, 9899, ByteBuffer.wrap("Just".getBytes()), 1);
bootstrapThrift.getBootstrapThriftServiceImpl().reset();
dm.onNodeAdded(nodeInfo);
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap());
assertEquals(2, bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().size());
bootstrapThrift.getBootstrapThriftServiceImpl().reset();
dm.onNodeRemoved(nodeInfo);
checkBSNode();
}
@Ignore("TODO: FIX before merge to master")
@Test
public void sendRedirectionRuleTest() {
LOG.info("BootstrapNodeUpdateTest started");
DynamicLoadManager dm = getDynamicLoadManager();
long timeStarted = System.currentTimeMillis();
OperationsNodeInfo nodeInfo1 = getUpdatedOperationsNode(9898, 10, timeStarted);
OperationsNodeInfo nodeInfo2 = getUpdatedOperationsNode(9899, 0, timeStarted);
LOG.info("BootstrapNodeTest Operations Node {} updated. 1", nodeInfo1.toString());
dm.onNodeUpdated(nodeInfo1);
dm.onNodeUpdated(nodeInfo2);
nodeInfo1 = getUpdatedOperationsNode(9898, 30, timeStarted + 300000);
nodeInfo2 = getUpdatedOperationsNode(9899, 0, timeStarted + 300000);
LOG.info("BootstrapNodeTest Operations Node {} updated. 2", nodeInfo1.toString());
dm.onNodeUpdated(nodeInfo1);
dm.onNodeUpdated(nodeInfo2);
nodeInfo1 = getUpdatedOperationsNode(9898, 55, timeStarted + 600000);
nodeInfo2 = getUpdatedOperationsNode(9899, 0, timeStarted + 600000);
LOG.info("BootstrapNodeTest Operations Node {} updated. 3", nodeInfo1.toString());
dm.onNodeUpdated(nodeInfo1);
dm.onNodeUpdated(nodeInfo2);
ConnectionInfo bsConnectionInfo = new ConnectionInfo(thriftHost, thriftPort, ByteBuffer.wrap("Just array".getBytes()));
BootstrapNodeInfo bsNode = getBootstrapNodeInfo(bsConnectionInfo);
dm.onNodeAdded(bsNode);
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl());
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap());
assertEquals(2, bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().size());
bootstrapThrift.getBootstrapThriftServiceImpl().reset();
dm.recalculate();
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
fail(e.toString());
}
}
private OperationsNodeInfo getUpdatedOperationsNode(int httpPort, int processRequestCount, long timeStart) {
Integer processedRequestCount = new Integer(processRequestCount);
OperationsNodeInfo nodeInfo = generateOperationsNodeInfo("localhost", 1200, httpPort, ByteBuffer.wrap("Just array".getBytes()),
processedRequestCount);
return nodeInfo;
}
private DynamicLoadManager getDynamicLoadManager() {
DynamicLoadManager dm = new DynamicLoadManager(ldServiceMock);
assertNotNull(dm);
Integer processedRequestCount = new Integer(0);
OperationsNodeInfo nodeInfo = generateOperationsNodeInfo("localhost", 1200, 9898, ByteBuffer.wrap("Just array".getBytes()),
processedRequestCount);
dm.onNodeAdded(nodeInfo);
LOG.info("BootstrapNodeTest Operations Node {} added.", nodeInfo.toString());
return dm;
}
private void checkBSNode() {
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl());
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap());
assertNotNull(bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().get("localhost:1200"));
assertEquals(DEFAULT_PRIORITY, bootstrapThrift.getBootstrapThriftServiceImpl().getOperatonsServerMap().get("localhost:1200")
.getPriority());
}
private OperationsNodeInfo generateOperationsNodeInfo(String thriftHost, int thriftPort, int httpPort, ByteBuffer publicKey,
int loadInfo) {
OperationsNodeInfo nodeInfo = new OperationsNodeInfo();
nodeInfo.setTimeStarted(System.currentTimeMillis());
nodeInfo.setTransports(new ArrayList<TransportMetaData>());
nodeInfo.setLoadInfo(new LoadInfo(loadInfo, 1.0));
nodeInfo.setConnectionInfo(new ConnectionInfo(thriftHost, thriftPort, publicKey));
return nodeInfo;
}
private BootstrapNodeInfo getBootstrapNodeInfo(ConnectionInfo bsConnectionInfo) {
BootstrapNodeInfo nodeInfo = new BootstrapNodeInfo();
nodeInfo.setConnectionInfo(bsConnectionInfo);
nodeInfo.setTimeStarted(System.currentTimeMillis());
nodeInfo.setTransports(new ArrayList<TransportMetaData>());
return nodeInfo;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.wrappers.streaming.state;
import static org.apache.flink.util.Preconditions.checkArgument;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.runners.core.StateInternals;
import org.apache.beam.runners.core.StateNamespace;
import org.apache.beam.runners.core.StateTag;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.ListCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.state.BagState;
import org.apache.beam.sdk.state.CombiningState;
import org.apache.beam.sdk.state.MapState;
import org.apache.beam.sdk.state.ReadableState;
import org.apache.beam.sdk.state.SetState;
import org.apache.beam.sdk.state.State;
import org.apache.beam.sdk.state.StateContext;
import org.apache.beam.sdk.state.ValueState;
import org.apache.beam.sdk.state.WatermarkHoldState;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.CombineWithContext;
import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
import org.apache.beam.sdk.util.CoderUtils;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.state.KeyGroupsList;
import org.apache.flink.runtime.state.KeyedStateBackend;
import org.apache.flink.util.InstantiationUtil;
import org.apache.flink.util.Preconditions;
/**
* {@link StateInternals} that uses {@link KeyGroupCheckpointedOperator} to checkpoint state.
*
* <p>Note: Ignore index of key. Just implement BagState.
*
* <p>Reference from Flink's HeapInternalTimerService to the local key-group range.
*/
public class FlinkKeyGroupStateInternals<K> implements StateInternals {

  /** Coder used to decode the backend's raw key bytes in {@link #getKey()}. */
  private final Coder<K> keyCoder;

  /** Key-group range assigned to this operator instance by Flink. */
  private final KeyGroupsList localKeyGroupRange;

  /** Flink backend supplying the current key and key-group index. */
  private KeyedStateBackend keyedStateBackend;

  /** Smallest key-group index in the local range; maps global indices to array slots. */
  private final int localKeyGroupRangeStartIdx;

  // stateName -> namespace -> (valueCoder, value)
  // One table per locally-owned key group, indexed by (keyGroupIdx - localKeyGroupRangeStartIdx).
  private final Map<String, Tuple2<Coder<?>, Map<String, ?>>>[] stateTables;

  public FlinkKeyGroupStateInternals(Coder<K> keyCoder, KeyedStateBackend keyedStateBackend) {
    this.keyCoder = Preconditions.checkNotNull(keyCoder, "Coder for key must be provided.");
    this.keyedStateBackend =
        Preconditions.checkNotNull(
            keyedStateBackend, "KeyedStateBackend must not be null. Missing keyBy call?");
    this.localKeyGroupRange = keyedStateBackend.getKeyGroupRange();
    // find the starting index of the local key-group range
    int startIdx = Integer.MAX_VALUE;
    for (Integer keyGroupIdx : localKeyGroupRange) {
      startIdx = Math.min(keyGroupIdx, startIdx);
    }
    this.localKeyGroupRangeStartIdx = startIdx;
    // Generic array creation is not allowed in Java, hence the unchecked cast.
    stateTables =
        (Map<String, Tuple2<Coder<?>, Map<String, ?>>>[])
            new Map[localKeyGroupRange.getNumberOfKeyGroups()];
    for (int i = 0; i < stateTables.length; i++) {
      stateTables[i] = new HashMap<>();
    }
  }

  /**
   * Decodes the current key from the backend's raw key bytes using {@link #keyCoder}.
   * The backing buffer's position is restored afterwards so the backend is unaffected.
   */
  @Override
  public K getKey() {
    ByteBuffer keyBytes = (ByteBuffer) keyedStateBackend.getCurrentKey();
    try {
      byte[] bytes = new byte[keyBytes.remaining()];
      keyBytes.get(bytes);
      // Rewind the buffer to leave the backend's key exactly as we found it.
      keyBytes.position(keyBytes.position() - bytes.length);
      return CoderUtils.decodeFromByteArray(keyCoder, bytes);
    } catch (CoderException e) {
      throw new RuntimeException("Error decoding key.", e);
    }
  }

  /**
   * Binds a state cell for the given namespace and tag. Only {@link BagState} is
   * supported (see class javadoc); every other state kind throws
   * {@link UnsupportedOperationException}.
   */
  @Override
  public <T extends State> T state(
      final StateNamespace namespace, StateTag<T> address, final StateContext<?> context) {
    return address.bind(
        new StateTag.StateBinder() {
          @Override
          public <T2> ValueState<T2> bindValue(StateTag<ValueState<T2>> address, Coder<T2> coder) {
            throw new UnsupportedOperationException(
                String.format("%s is not supported", ValueState.class.getSimpleName()));
          }

          @Override
          public <T2> BagState<T2> bindBag(StateTag<BagState<T2>> address, Coder<T2> elemCoder) {
            // The only supported state kind.
            return new FlinkKeyGroupBagState<>(address, namespace, elemCoder);
          }

          @Override
          public <T2> SetState<T2> bindSet(StateTag<SetState<T2>> address, Coder<T2> elemCoder) {
            throw new UnsupportedOperationException(
                String.format("%s is not supported", SetState.class.getSimpleName()));
          }

          @Override
          public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
              StateTag<MapState<KeyT, ValueT>> spec,
              Coder<KeyT> mapKeyCoder,
              Coder<ValueT> mapValueCoder) {
            throw new UnsupportedOperationException(
                String.format("%s is not supported", MapState.class.getSimpleName()));
          }

          @Override
          public <InputT, AccumT, OutputT>
              CombiningState<InputT, AccumT, OutputT> bindCombiningValue(
                  StateTag<CombiningState<InputT, AccumT, OutputT>> address,
                  Coder<AccumT> accumCoder,
                  Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
            throw new UnsupportedOperationException("bindCombiningValue is not supported.");
          }

          @Override
          public <InputT, AccumT, OutputT>
              CombiningState<InputT, AccumT, OutputT> bindCombiningValueWithContext(
                  StateTag<CombiningState<InputT, AccumT, OutputT>> address,
                  Coder<AccumT> accumCoder,
                  CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFn) {
            throw new UnsupportedOperationException(
                "bindCombiningValueWithContext is not supported.");
          }

          @Override
          public WatermarkHoldState bindWatermark(
              StateTag<WatermarkHoldState> address, TimestampCombiner timestampCombiner) {
            // NOTE(review): this message names CombiningState rather than
            // WatermarkHoldState; kept byte-identical because it is a runtime string.
            throw new UnsupportedOperationException(
                String.format("%s is not supported", CombiningState.class.getSimpleName()));
          }
        });
  }

  /**
   * Reference from {@link Combine.CombineFn}.
   *
   * <p>Accumulators are stored in each KeyGroup, call addInput() when a element comes, call
   * extractOutput() to produce the desired value when need to read data.
   */
  interface KeyGroupCombiner<InputT, AccumT, OutputT> {
    /**
     * Returns a new, mutable accumulator value, representing the accumulation of zero input values.
     */
    AccumT createAccumulator();

    /** Adds the given input value to the given accumulator, returning the new accumulator value. */
    AccumT addInput(AccumT accumulator, InputT input);

    /**
     * Returns the output value that is the result of all accumulators from KeyGroups that are
     * assigned to this operator.
     */
    OutputT extractOutput(Iterable<AccumT> accumulators);
  }

  /**
   * Base for key-group-partitioned state: writes go to the accumulator of the key group
   * the current key hashes to; reads combine the accumulators of ALL local key groups.
   */
  private abstract class AbstractKeyGroupState<InputT, AccumT, OutputT> {
    private String stateName;
    private String namespace;
    private Coder<AccumT> coder;
    private KeyGroupCombiner<InputT, AccumT, OutputT> keyGroupCombiner;

    AbstractKeyGroupState(
        String stateName,
        String namespace,
        Coder<AccumT> coder,
        KeyGroupCombiner<InputT, AccumT, OutputT> keyGroupCombiner) {
      this.stateName = stateName;
      this.namespace = namespace;
      this.coder = coder;
      this.keyGroupCombiner = keyGroupCombiner;
    }

    /** Choose keyGroup of input and addInput to accumulator. */
    void addInput(InputT input) {
      int keyGroupIdx = keyedStateBackend.getCurrentKeyGroupIndex();
      int localIdx = getIndexForKeyGroup(keyGroupIdx);
      Map<String, Tuple2<Coder<?>, Map<String, ?>>> stateTable = stateTables[localIdx];
      Tuple2<Coder<?>, Map<String, ?>> tuple2 = stateTable.get(stateName);
      if (tuple2 == null) {
        // Lazily create the (coder, namespace->accumulator) entry for this state name.
        tuple2 = new Tuple2<>();
        tuple2.f0 = coder;
        tuple2.f1 = new HashMap<>();
        stateTable.put(stateName, tuple2);
      }
      Map<String, AccumT> map = (Map<String, AccumT>) tuple2.f1;
      AccumT accumulator = map.get(namespace);
      if (accumulator == null) {
        accumulator = keyGroupCombiner.createAccumulator();
      }
      accumulator = keyGroupCombiner.addInput(accumulator, input);
      map.put(namespace, accumulator);
    }

    /** Get all accumulators and invoke extractOutput(). */
    OutputT extractOutput() {
      List<AccumT> accumulators = new ArrayList<>(stateTables.length);
      for (Map<String, Tuple2<Coder<?>, Map<String, ?>>> stateTable : stateTables) {
        Tuple2<Coder<?>, Map<String, ?>> tuple2 = stateTable.get(stateName);
        if (tuple2 != null) {
          AccumT accumulator = (AccumT) tuple2.f1.get(namespace);
          if (accumulator != null) {
            accumulators.add(accumulator);
          }
        }
      }
      return keyGroupCombiner.extractOutput(accumulators);
    }

    /** Find the first accumulator and return immediately. */
    boolean isEmptyInternal() {
      for (Map<String, Tuple2<Coder<?>, Map<String, ?>>> stateTable : stateTables) {
        Tuple2<Coder<?>, Map<String, ?>> tuple2 = stateTable.get(stateName);
        if (tuple2 != null) {
          AccumT accumulator = (AccumT) tuple2.f1.get(namespace);
          if (accumulator != null) {
            return false;
          }
        }
      }
      return true;
    }

    /** Clear accumulators and clean empty map. */
    void clearInternal() {
      for (Map<String, Tuple2<Coder<?>, Map<String, ?>>> stateTable : stateTables) {
        Tuple2<Coder<?>, Map<String, ?>> tuple2 = stateTable.get(stateName);
        if (tuple2 != null) {
          tuple2.f1.remove(namespace);
          if (tuple2.f1.isEmpty()) {
            // Drop the state-name entry entirely once its last namespace is gone,
            // so snapshots don't write empty tables.
            stateTable.remove(stateName);
          }
        }
      }
    }
  }

  /**
   * Maps a global key-group index to its slot in {@link #stateTables}.
   *
   * @throws IllegalArgumentException if the key group is not locally owned
   */
  private int getIndexForKeyGroup(int keyGroupIdx) {
    checkArgument(
        localKeyGroupRange.contains(keyGroupIdx),
        "Key Group " + keyGroupIdx + " does not belong to the local range.");
    return keyGroupIdx - this.localKeyGroupRangeStartIdx;
  }

  /** Bag semantics as a {@link KeyGroupCombiner}: accumulate into lists, concatenate on read. */
  private static class KeyGroupBagCombiner<T> implements KeyGroupCombiner<T, List<T>, Iterable<T>> {
    @Override
    public List<T> createAccumulator() {
      return new ArrayList<>();
    }

    @Override
    public List<T> addInput(List<T> accumulator, T input) {
      accumulator.add(input);
      return accumulator;
    }

    @Override
    public Iterable<T> extractOutput(Iterable<List<T>> accumulators) {
      List<T> result = new ArrayList<>();
      // maybe can return an unmodifiable view.
      for (List<T> list : accumulators) {
        result.addAll(list);
      }
      return result;
    }
  }

  /** {@link BagState} backed by per-key-group lists; reads span all local key groups. */
  private class FlinkKeyGroupBagState<T> extends AbstractKeyGroupState<T, List<T>, Iterable<T>>
      implements BagState<T> {
    private final StateNamespace namespace;
    private final StateTag<BagState<T>> address;

    FlinkKeyGroupBagState(StateTag<BagState<T>> address, StateNamespace namespace, Coder<T> coder) {
      super(
          address.getId(), namespace.stringKey(), ListCoder.of(coder), new KeyGroupBagCombiner<>());
      this.namespace = namespace;
      this.address = address;
    }

    @Override
    public void add(T input) {
      addInput(input);
    }

    @Override
    public BagState<T> readLater() {
      // Reads are always served from the in-memory tables; nothing to prefetch.
      return this;
    }

    @Override
    public Iterable<T> read() {
      Iterable<T> result = extractOutput();
      return result != null ? result : Collections.emptyList();
    }

    @Override
    public ReadableState<Boolean> isEmpty() {
      return new ReadableState<Boolean>() {
        @Override
        public Boolean read() {
          try {
            return isEmptyInternal();
          } catch (Exception e) {
            throw new RuntimeException("Error reading state.", e);
          }
        }

        @Override
        public ReadableState<Boolean> readLater() {
          return this;
        }
      };
    }

    @Override
    public void clear() {
      clearInternal();
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      FlinkKeyGroupBagState<?> that = (FlinkKeyGroupBagState<?>) o;
      // Identity is (namespace, tag); the stored contents are not compared.
      return namespace.equals(that.namespace) && address.equals(that.address);
    }

    @Override
    public int hashCode() {
      int result = namespace.hashCode();
      result = 31 * result + address.hashCode();
      return result;
    }
  }

  /**
   * Snapshots the state {@code (stateName -> (valueCoder && (namespace -> value)))} for a given
   * {@code keyGroupIdx}.
   *
   * <p>Wire format: short stateCount, then per state: UTF stateName, Java-serialized coder,
   * int namespaceCount, then per namespace: UTF8-coded namespace key, coder-encoded value.
   * Must stay in sync with {@link #restoreKeyGroupState}.
   *
   * @param keyGroupIdx the id of the key-group to be put in the snapshot.
   * @param out the stream to write to.
   */
  public void snapshotKeyGroupState(int keyGroupIdx, DataOutputStream out) throws Exception {
    int localIdx = getIndexForKeyGroup(keyGroupIdx);
    Map<String, Tuple2<Coder<?>, Map<String, ?>>> stateTable = stateTables[localIdx];
    // The count is written as a short, hence the explicit bound check.
    Preconditions.checkState(
        stateTable.size() <= Short.MAX_VALUE,
        "Too many States: "
            + stateTable.size()
            + ". Currently at most "
            + Short.MAX_VALUE
            + " states are supported");
    out.writeShort(stateTable.size());
    for (Map.Entry<String, Tuple2<Coder<?>, Map<String, ?>>> entry : stateTable.entrySet()) {
      out.writeUTF(entry.getKey());
      Coder coder = entry.getValue().f0;
      InstantiationUtil.serializeObject(out, coder);
      Map<String, ?> map = entry.getValue().f1;
      out.writeInt(map.size());
      for (Map.Entry<String, ?> entry1 : map.entrySet()) {
        StringUtf8Coder.of().encode(entry1.getKey(), out);
        coder.encode(entry1.getValue(), out);
      }
    }
  }

  /**
   * Restore the state {@code (stateName -> (valueCoder && (namespace -> value)))} for a given
   * {@code keyGroupIdx}.
   *
   * <p>Reads the format written by {@link #snapshotKeyGroupState}. Restored values are merged
   * into any existing table; an existing value for the same (stateName, namespace) is overwritten.
   *
   * @param keyGroupIdx the id of the key-group to be put in the snapshot.
   * @param in the stream to read from.
   * @param userCodeClassLoader the class loader that will be used to deserialize the valueCoder.
   */
  public void restoreKeyGroupState(
      int keyGroupIdx, DataInputStream in, ClassLoader userCodeClassLoader) throws Exception {
    int localIdx = getIndexForKeyGroup(keyGroupIdx);
    Map<String, Tuple2<Coder<?>, Map<String, ?>>> stateTable = stateTables[localIdx];
    int numStates = in.readShort();
    for (int i = 0; i < numStates; ++i) {
      String stateName = in.readUTF();
      Coder coder = InstantiationUtil.deserializeObject(in, userCodeClassLoader);
      Tuple2<Coder<?>, Map<String, ?>> tuple2 = stateTable.get(stateName);
      if (tuple2 == null) {
        tuple2 = new Tuple2<>();
        tuple2.f0 = coder;
        tuple2.f1 = new HashMap<>();
        stateTable.put(stateName, tuple2);
      }
      // NOTE(review): if an entry already exists, its previously registered coder (f0)
      // is kept, while values below are decoded with the snapshot's coder — presumably
      // the two are always identical; confirm if coder evolution is ever possible.
      Map<String, Object> map = (Map<String, Object>) tuple2.f1;
      int mapSize = in.readInt();
      for (int j = 0; j < mapSize; j++) {
        String namespace = StringUtf8Coder.of().decode(in);
        Object value = coder.decode(in);
        map.put(namespace, value);
      }
    }
  }
}
|
|
/**
* Copyright (c) 2016 See AUTHORS file
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the mini2Dx nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.mini2Dx.core.geom;
import org.mini2Dx.core.exception.MdxException;
import org.mini2Dx.core.graphics.Graphics;
import org.mini2Dx.core.util.EdgeIterator;
import com.badlogic.gdx.math.EarClippingTriangulator;
import com.badlogic.gdx.math.Intersector;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.ShortArray;
/**
* Implements a rotatable polygon. Adds extra functionality to the default
* polygon implementation in LibGDX
*/
public class Polygon extends Shape {
private final EarClippingTriangulator triangulator;
private final PolygonEdgeIterator edgeIterator = new PolygonEdgeIterator();
private final PolygonEdgeIterator internalEdgeIterator = new PolygonEdgeIterator();
private final Vector2 tmp1 = new Vector2();
private final Vector2 tmp2 = new Vector2();
final com.badlogic.gdx.math.Polygon polygon;
private int totalSidesCache = -1;
private float minX, minY, maxX, maxY;
private ShortArray triangles;
private float trackedRotation = 0f;
private boolean isRectangle;
private boolean minMaxDirty = true;
private boolean trianglesDirty = true;
/**
 * Constructor. Note that vertices must be in a clockwise order for
 * performance and accuracy.
 *
 * @param vertices
 *            All points in x,y pairs. E.g. x1,y1,x2,y2,etc.
 */
public Polygon(float[] vertices) {
    polygon = new com.badlogic.gdx.math.Polygon(vertices);
    // The first point doubles as the origin used for rotation/scaling.
    polygon.setOrigin(vertices[0], vertices[1]);
    triangulator = new EarClippingTriangulator();
    // Eagerly primes the cached side count.
    // NOTE(review): getNumberOfSides() is defined outside this excerpt — confirm it is
    // not overridable, since calling an overridable method from a constructor would run
    // subclass code before the subclass is initialised.
    getNumberOfSides();
}
/**
 * Constructor with vectors. Note that vectors must be in a clockwise order
 * for performance and accuracy.
 *
 * @param points
 *            All points in the polygon
 */
public Polygon(Vector2[] points) {
    // Flattens the vectors into x,y pairs and delegates to the primary constructor.
    this(toVertices(points));
}
/**
 * Linearly interpolates this polygon's vertices and rotation towards the target
 * polygon, mutating this instance.
 *
 * @param target polygon to interpolate towards; must have the same vertex count
 * @param alpha interpolation factor; 0 keeps this polygon, 1 matches the target
 * @return this polygon for chaining
 * @throws MdxException if the two polygons have different vertex counts
 */
public Polygon lerp(Polygon target, float alpha) {
    final float inverseAlpha = 1.0f - alpha;
    float [] currentVertices = polygon.getTransformedVertices();
    float [] targetVertices = target.polygon.getTransformedVertices();
    if(currentVertices.length != targetVertices.length) {
        throw new MdxException("Cannot lerp polygons with different vertice amounts");
    }
    // NOTE(review): only the first vertex is compared here — this assumes that if the
    // first points already coincide, the remaining vertices do too (e.g. the polygons
    // differ only by rotation). Confirm this invariant holds for all callers.
    if(currentVertices[0] != targetVertices[0] || currentVertices[1] != targetVertices[1]) {
        for(int i = 0; i < currentVertices.length; i += 2) {
            currentVertices[i] = (currentVertices[i] * inverseAlpha) + (targetVertices[i] * alpha);
            currentVertices[i + 1] = (currentVertices[i + 1] * inverseAlpha) + (targetVertices[i + 1] * alpha);
        }
        // Re-seed the backing polygon with the blended (already transformed) vertices
        // and keep the first point as the origin, matching the constructor.
        polygon.setOrigin(currentVertices[0], currentVertices[1]);
        polygon.setVertices(currentVertices);
        setDirty();
    }
    if(getRotation() != target.getRotation()) {
        // Blend the absolute rotation, then apply only the delta: the backing polygon's
        // rotation is relative to trackedRotation already applied to its vertices.
        float rotation = (trackedRotation * inverseAlpha) + (target.getRotation() * alpha);
        polygon.setRotation(rotation - trackedRotation);
        trackedRotation = rotation;
        setDirty();
    }
    return this;
}
@Override
public Shape copy() {
Polygon result = new Polygon(polygon.getTransformedVertices());
result.trackedRotation = trackedRotation;
return result;
}
private void clearTotalSidesCache() {
totalSidesCache = -1;
}
protected boolean triangleContains(float x, float y, float p1x, float p1y, float p2x, float p2y, float p3x,
float p3y) {
boolean b1, b2, b3;
b1 = sign(x, y, p1x, p1y, p2x, p2y) < 0.0f;
b2 = sign(x, y, p2x, p2y, p3x, p3y) < 0.0f;
b3 = sign(x, y, p3x, p3y, p1x, p1y) < 0.0f;
return ((b1 == b2) && (b2 == b3));
}
protected float sign(float x, float y, float p1x, float p1y, float p2x, float p2y) {
return (x - p2x) * (p1y - p2y) - (p1x - p2x) * (y - p2y);
}
@Override
public boolean contains(float x, float y) {
if (isRectangle) {
return triangleContains(x, y, polygon.getTransformedVertices()[0], polygon.getTransformedVertices()[1],
polygon.getTransformedVertices()[2], polygon.getTransformedVertices()[3],
polygon.getTransformedVertices()[6], polygon.getTransformedVertices()[7])
|| triangleContains(x, y, polygon.getTransformedVertices()[6], polygon.getTransformedVertices()[7],
polygon.getTransformedVertices()[2], polygon.getTransformedVertices()[3],
polygon.getTransformedVertices()[4], polygon.getTransformedVertices()[5]);
}
return polygon.contains(x, y);
}
@Override
public boolean contains(Vector2 vector2) {
return contains(vector2.x, vector2.y);
}
@Override
public boolean contains(Shape shape) {
if (shape.isCircle()) {
Rectangle circleBox = ((Circle) shape).getBoundingBox();
return contains(circleBox.getPolygon());
}
return contains(shape.getPolygon());
}
public boolean contains(Polygon polygon) {
return org.mini2Dx.core.geom.Intersector.containsPolygon(this, polygon);
}
@Override
public boolean intersects(Shape shape) {
if (shape.isCircle()) {
return intersects((Circle) shape);
}
return intersects(shape.getPolygon());
}
/**
* Returns if this {@link Polygon} intersects another
*
* @param polygon
* The other {@link Polygon}
* @return True if the two {@link Polygon}s intersect
*/
public boolean intersects(Polygon polygon) {
minMaxDirtyCheck();
polygon.minMaxDirtyCheck();
if (isRectangle && polygon.isRectangle) {
boolean xAxisOverlaps = true;
boolean yAxisOverlaps = true;
if (maxX < polygon.minX)
xAxisOverlaps = false;
if (polygon.maxX < minX)
xAxisOverlaps = false;
if (maxY < polygon.minY)
yAxisOverlaps = false;
if (polygon.maxY < minY)
yAxisOverlaps = false;
return xAxisOverlaps && yAxisOverlaps;
}
if (polygon.minX > maxX) {
return false;
}
if (polygon.maxX < minX) {
return false;
}
if (polygon.minY > maxY) {
return false;
}
if (polygon.maxY < minY) {
return false;
}
boolean result = false;
internalEdgeIterator.begin();
while (internalEdgeIterator.hasNext()) {
internalEdgeIterator.next();
if (polygon.intersects(internalEdgeIterator.getEdgeLineSegment())) {
result = true;
break;
}
}
internalEdgeIterator.end();
return result;
}
/**
* Returns if this {@link Polygon} intersects a {@link Triangle}
*
* @param triangle
* The {@link Triangle} to check
* @return True if this {@link Polygon} and {@link Triangle} intersect
*/
public boolean intersects(Triangle triangle) {
return intersects(triangle.polygon);
}
/**
* Returns if the specified {@link Rectangle} intersects this
* {@link Polygon}
*
* @param rectangle
* The {@link Rectangle} to check
* @return True if this {@link Polygon} and {@link Rectangle} intersect
*/
public boolean intersects(Rectangle rectangle) {
return intersects(rectangle.polygon);
}
public boolean intersects(Circle circle) {
if (isRectangle) {
minMaxDirtyCheck();
float closestX = circle.getX();
float closestY = circle.getY();
if (circle.getX() < minX) {
closestX = minX;
} else if (circle.getX() > maxX) {
closestX = maxX;
}
if (circle.getY() < minY) {
closestY = minY;
} else if (circle.getY() > maxY) {
closestY = maxY;
}
closestX = closestX - circle.getX();
closestX *= closestX;
closestY = closestY - circle.getY();
closestY *= closestY;
return closestX + closestY < circle.getRadius() * circle.getRadius();
}
boolean result = false;
internalEdgeIterator.begin();
while (internalEdgeIterator.hasNext()) {
internalEdgeIterator.next();
if (circle.intersectsLineSegment(internalEdgeIterator.getPointAX(), internalEdgeIterator.getPointAY(),
internalEdgeIterator.getPointBX(), internalEdgeIterator.getPointBY())) {
result = true;
break;
}
}
internalEdgeIterator.end();
return result;
}
@Override
public boolean intersects(LineSegment lineSegment) {
return intersectsLineSegment(lineSegment.getPointA(), lineSegment.getPointB());
}
@Override
public boolean intersectsLineSegment(Vector2 pointA, Vector2 pointB) {
return Intersector.intersectSegmentPolygon(pointA, pointB, polygon);
}
@Override
public boolean intersectsLineSegment(float x1, float y1, float x2, float y2) {
tmp1.set(x1, y1);
tmp2.set(x2, y2);
return Intersector.intersectSegmentPolygon(tmp1, tmp2, polygon);
}
@Override
public float getDistanceTo(float x, float y) {
float[] vertices = polygon.getTransformedVertices();
float result = Intersector.distanceSegmentPoint(vertices[vertices.length - 2], vertices[vertices.length - 1],
vertices[0], vertices[1], x, y);
for (int i = 0; i < vertices.length - 2; i += 2) {
float distance = Intersector.distanceSegmentPoint(vertices[i], vertices[i + 1], vertices[i + 2],
vertices[i + 3], x, y);
if (distance < result) {
result = distance;
}
}
return result;
}
/**
* Adds an additional point to this {@link Polygon}
*
* @param x
* The x coordinate
* @param y
* The y coordinate
*/
public void addPoint(float x, float y) {
float[] existingVertices = polygon.getTransformedVertices();
float[] newVertices = new float[existingVertices.length + 2];
if (existingVertices.length > 0) {
System.arraycopy(existingVertices, 0, newVertices, 0, existingVertices.length);
}
newVertices[existingVertices.length] = x;
newVertices[existingVertices.length + 1] = y;
polygon.translate(-polygon.getX(), -polygon.getY());
polygon.setVertices(newVertices);
clearTotalSidesCache();
setDirty();
}
/**
* Adds an additional point to this {@link Polygon}
*
* @param point
* The point to add as a {@link Vector2}
*/
public void addPoint(Vector2 point) {
addPoint(point.x, point.y);
}
private void removePoint(int i) {
float[] existingVertices = polygon.getTransformedVertices();
float[] newVertices = new float[existingVertices.length - 2];
if (i > 0) {
System.arraycopy(existingVertices, 0, newVertices, 0, i);
}
if (i < existingVertices.length - 2) {
System.arraycopy(existingVertices, i + 2, newVertices, i, existingVertices.length - i - 2);
}
polygon.translate(-polygon.getX(), -polygon.getY());
polygon.setVertices(newVertices);
setDirty();
clearTotalSidesCache();
}
/**
* Removes a point from this {@link Polygon}
*
* @param x
* The x coordinate
* @param y
* The y coordinate
*/
public void removePoint(float x, float y) {
float[] existingVertices = polygon.getTransformedVertices();
for (int i = 0; i < existingVertices.length; i += 2) {
if (existingVertices[i] != x) {
continue;
}
if (existingVertices[i + 1] != y) {
continue;
}
removePoint(i);
return;
}
}
/**
* Removes a point from this {@link Polygon}
*
* @param point
* The point to remove as a {@link Vector2}
*/
public void removePoint(Vector2 point) {
removePoint(point.x, point.y);
}
@Override
public int getNumberOfSides() {
if (totalSidesCache < 0) {
totalSidesCache = polygon.getTransformedVertices().length / 2;
isRectangle = totalSidesCache == 4;
}
return totalSidesCache;
}
@Override
public void draw(Graphics g) {
g.drawPolygon(polygon.getTransformedVertices());
}
@Override
public void fill(Graphics g) {
g.fillPolygon(polygon.getTransformedVertices(), getTriangles().items);
}
public float[] getVertices() {
return polygon.getTransformedVertices();
}
public void setVertices(float[] vertices) {
polygon.setOrigin(vertices[0], vertices[1]);
polygon.setRotation(0f);
polygon.setVertices(vertices);
trackedRotation = 0f;
clearTotalSidesCache();
setDirty();
}
public void setVertices(Vector2[] vertices) {
setVertices(toVertices(vertices));
}
@Override
public float getRotation() {
return trackedRotation;
}
@Override
public void setRotation(float degrees) {
setRotationAround(polygon.getTransformedVertices()[0], polygon.getTransformedVertices()[1], degrees);
}
@Override
public void rotate(float degrees) {
rotateAround(polygon.getTransformedVertices()[0], polygon.getTransformedVertices()[1], degrees);
}
@Override
public void setRotationAround(float centerX, float centerY, float degrees) {
if(trackedRotation == degrees && centerX == polygon.getOriginX() && centerY == polygon.getOriginY()) {
return;
}
polygon.setVertices(polygon.getTransformedVertices());
polygon.setOrigin(centerX, centerY);
polygon.setRotation(degrees - trackedRotation);
trackedRotation = degrees;
setDirty();
}
@Override
public void rotateAround(float centerX, float centerY, float degrees) {
if(degrees == 0f) {
return;
}
trackedRotation += degrees;
float[] vertices = polygon.getTransformedVertices();
polygon.setRotation(0);
polygon.setOrigin(centerX, centerY);
polygon.setVertices(vertices);
polygon.rotate(degrees);
setDirty();
}
/**
* Returns the x coordinate
*
* @return The x coordinate of the first point in this {@link Polygon}
*/
@Override
public float getX() {
return getX(0);
}
/**
* Returns the y coordinate
*
* @return The y coordinate of the first point in this {@link Polygon}
*/
@Override
public float getY() {
return getY(0);
}
/**
* Returns the x coordinate of the corner at the specified index
*
* @param index
* The point index
* @return The x coordinate of the corner
*/
public float getX(int index) {
return polygon.getTransformedVertices()[index * 2];
}
/**
* Returns the y coordinate of the corner at the specified index
*
* @param index
* The point index
* @return The y coordinate of the corner
*/
public float getY(int index) {
return polygon.getTransformedVertices()[(index * 2) + 1];
}
/**
* Returns min X coordinate of this {@link Polygon}
*
* @return The left-most x coordinate
*/
public float getMinX() {
minMaxDirtyCheck();
return minX;
}
/**
* Returns min Y coordinate of this {@link Polygon}
*
* @return The up-most y coordinate
*/
public float getMinY() {
minMaxDirtyCheck();
return minY;
}
/**
* Returns max X coordinate of this {@link Polygon}
*
* @return The right-most x coordinate
*/
public float getMaxX() {
minMaxDirtyCheck();
return maxX;
}
/**
* Returns max Y coordinate of this {@link Polygon}
*
* @return The bottom-most y coordinate
*/
public float getMaxY() {
minMaxDirtyCheck();
return maxY;
}
/**
* Returns an array of vertex indices that the define the triangles which
* make up this {@link Polygon}
*
* @return Array of triangle indices
*/
public ShortArray getTriangles() {
trianglesDirtyCheck();
return triangles;
}
private static float[] toVertices(Vector2[] points) {
if (points == null) {
throw new MdxException(Point.class.getSimpleName() + " array cannot be null");
}
if (points.length < 3) {
throw new MdxException(Point.class.getSimpleName() + " must have at least 3 points");
}
float[] result = new float[points.length * 2];
for (int i = 0; i < points.length; i++) {
int index = i * 2;
result[index] = points[i].x;
result[index + 1] = points[i].y;
}
return result;
}
@Override
public void setX(float x) {
if(x == getX()) {
return;
}
float[] vertices = polygon.getTransformedVertices();
float xDiff = x - getX();
for (int i = 0; i < vertices.length; i += 2) {
vertices[i] += xDiff;
}
polygon.setOrigin(x, getY());
polygon.setVertices(vertices);
setDirty();
}
@Override
public void setY(float y) {
if(y == getY()) {
return;
}
float[] vertices = polygon.getTransformedVertices();
float yDiff = y - getY();
for (int i = 1; i < vertices.length; i += 2) {
vertices[i] += yDiff;
}
polygon.setOrigin(getX(), y);
polygon.setVertices(vertices);
setDirty();
}
@Override
public void set(float x, float y) {
if(x == getX() && y == getY()) {
return;
}
float[] vertices = polygon.getTransformedVertices();
float xDiff = x - getX();
float yDiff = y - getY();
for (int i = 0; i < vertices.length; i += 2) {
vertices[i] += xDiff;
vertices[i + 1] += yDiff;
}
polygon.setOrigin(x, y);
polygon.setVertices(vertices);
setDirty();
}
public void set(Polygon polygon) {
this.polygon.setOrigin(polygon.polygon.getOriginX(), polygon.polygon.getOriginY());
this.polygon.setVertices(polygon.polygon.getTransformedVertices());
this.polygon.setRotation(0f);
this.trackedRotation = polygon.trackedRotation;
clearTotalSidesCache();
setDirty();
}
@Override
public void translate(float translateX, float translateY) {
float[] vertices = polygon.getTransformedVertices();
for (int i = 0; i < vertices.length; i += 2) {
vertices[i] += translateX;
vertices[i + 1] += translateY;
}
polygon.setOrigin(vertices[0], vertices[1]);
polygon.setVertices(vertices);
setDirty();
}
@Override
public EdgeIterator edgeIterator() {
return edgeIterator;
}
@Override
public boolean isCircle() {
return false;
}
@Override
public Polygon getPolygon() {
return this;
}
public float getOriginX() {
return polygon.getOriginX();
}
public float getOriginY() {
return polygon.getOriginY();
}
boolean isDirty() {
return minMaxDirty || trianglesDirty;
}
private void setDirty() {
minMaxDirty = true;
trianglesDirty = true;
}
private void minMaxDirtyCheck() {
if(!minMaxDirty) {
return;
}
calculateMinMaxXY(polygon.getTransformedVertices());
minMaxDirty = false;
}
private void trianglesDirtyCheck() {
if(!trianglesDirty) {
return;
}
computeTriangles(polygon.getTransformedVertices());
trianglesDirty = false;
}
private void computeTriangles(float[] vertices) {
triangles = triangulator.computeTriangles(vertices);
}
private void calculateMinMaxXY(float[] vertices) {
int minXIndex = 0;
int minYIndex = 1;
int maxXIndex = 0;
int maxYIndex = 1;
for (int i = 2; i < vertices.length; i += 2) {
if (vertices[i] < vertices[minXIndex]) {
minXIndex = i;
}
if (vertices[i + 1] < vertices[minYIndex]) {
minYIndex = i + 1;
}
if (vertices[i] > vertices[maxXIndex]) {
maxXIndex = i;
}
if (vertices[i + 1] > vertices[maxYIndex]) {
maxYIndex = i + 1;
}
}
this.minX = vertices[minXIndex];
this.minY = vertices[minYIndex];
this.maxX = vertices[maxXIndex];
this.maxY = vertices[maxYIndex];
}
@Override
public String toString() {
StringBuilder result = new StringBuilder();
for (int i = 0; i < polygon.getTransformedVertices().length; i += 2) {
result.append("[");
result.append(polygon.getTransformedVertices()[i]);
result.append(",");
result.append(polygon.getTransformedVertices()[i + 1]);
result.append("]");
}
return result.toString();
}
private class PolygonEdgeIterator extends EdgeIterator {
private int edge = 0;
private LineSegment edgeLineSegment = new LineSegment(0f, 0f, 1f, 1f);
@Override
protected void beginIteration() {
edge = -1;
}
@Override
protected void endIteration() {
}
@Override
protected void nextEdge() {
if (edge >= getNumberOfSides()) {
throw new MdxException("No more edges remaining. Make sure to call end()");
}
edge++;
if (!hasNext()) {
return;
}
edgeLineSegment.set(getPointAX(), getPointAY(), getPointBX(), getPointBY());
}
@Override
public boolean hasNext() {
return edge < getNumberOfSides() - 1;
}
@Override
public float getPointAX() {
if (edge < 0) {
throw new MdxException("Make sure to call next() after beginning iteration");
}
return polygon.getTransformedVertices()[edge * 2];
}
@Override
public float getPointAY() {
if (edge < 0) {
throw new MdxException("Make sure to call next() after beginning iteration");
}
return polygon.getTransformedVertices()[(edge * 2) + 1];
}
@Override
public float getPointBX() {
if (edge < 0) {
throw new MdxException("Make sure to call next() after beginning iteration");
}
if (edge == getNumberOfSides() - 1) {
return polygon.getTransformedVertices()[0];
}
return polygon.getTransformedVertices()[(edge + 1) * 2];
}
@Override
public float getPointBY() {
if (edge < 0) {
throw new MdxException("Make sure to call next() after beginning iteration");
}
if (edge == getNumberOfSides() - 1) {
return polygon.getTransformedVertices()[1];
}
return polygon.getTransformedVertices()[((edge + 1) * 2) + 1];
}
@Override
public LineSegment getEdgeLineSegment() {
return edgeLineSegment;
}
}
}
|
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.source.tree.java;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiImplUtil;
import com.intellij.psi.impl.java.stubs.JavaStubElementTypes;
import com.intellij.psi.impl.java.stubs.PsiNameValuePairStub;
import com.intellij.psi.impl.source.JavaStubPsiElement;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.ChildRole;
import com.intellij.psi.impl.source.tree.ElementType;
import com.intellij.psi.util.MethodSignature;
import com.intellij.psi.util.MethodSignatureUtil;
import com.intellij.reference.SoftReference;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.JBIterable;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.ref.Reference;
import java.util.Objects;
/**
 * PSI implementation of a name-value pair inside an annotation's parameter
 * list (e.g. {@code @Anno(name = value)}). Backed by either a stub or the AST.
 *
 * @author Dmitry Avdeev
 */
public class PsiNameValuePairImpl extends JavaStubPsiElement<PsiNameValuePairStub> implements PsiNameValuePair {
  // Stub-backed constructor (used when the file is loaded from the index)
  public PsiNameValuePairImpl(@NotNull PsiNameValuePairStub stub) {
    super(stub, JavaStubElementTypes.NAME_VALUE_PAIR);
  }
  // AST-backed constructor (used when the file is fully parsed)
  public PsiNameValuePairImpl(@NotNull ASTNode node) {
    super(node);
  }
  @NotNull
  @Override
  public NameValuePairElement getNode() {
    ASTNode node = super.getNode();
    // Defensive diagnostics: the node should always be a NameValuePairElement;
    // when it is not, include the PSI parent chain in the error for debugging
    if (!(node instanceof NameValuePairElement)) {
      String parents = String.join("; ", SyntaxTraverser.psiApi().parents(this).takeWhile(Objects::nonNull)
        .map(psi -> psi.getClass().getName()).toList());
      throw new IllegalStateException("Node is not NameValuePairElement; node class = " + node.getClass() + "; parents = " + parents);
    }
    return (NameValuePairElement)node;
  }
  @Override
  public String getName() {
    // Prefer the stub when available to avoid forcing AST loading
    PsiNameValuePairStub stub = getStub();
    if (stub == null) {
      PsiIdentifier nameIdentifier = getNameIdentifier();
      return nameIdentifier == null ? null : nameIdentifier.getText();
    }
    else {
      return stub.getName();
    }
  }
  @Override
  public String getLiteralValue() {
    // Only literal values produce a result; the surrounding quotes are stripped
    PsiAnnotationMemberValue value = getValue();
    return value instanceof PsiLiteralExpression ? StringUtil.unquoteString(value.getText()) : null;
  }
  @Override
  public PsiIdentifier getNameIdentifier() {
    ASTNode node = getNode().findChildByRole(ChildRole.NAME);
    return node == null ? null : (PsiIdentifier)node.getPsi();
  }
  @Override
  public PsiAnnotationMemberValue getValue() {
    // Literal expressions may be stub-backed; fall back to the AST otherwise
    PsiLiteralExpression child = getStubOrPsiChild(JavaStubElementTypes.LITERAL_EXPRESSION);
    if (child != null) return child;
    ASTNode node = getNode().findChildByRole(ChildRole.ANNOTATION_VALUE);
    return node == null ? null : (PsiAnnotationMemberValue)node.getPsi();
  }
  @NotNull
  @Override
  public PsiAnnotationMemberValue setValue(@NotNull PsiAnnotationMemberValue newValue) {
    getValue().replace(newValue);
    // Re-read after replace: the replaced element is not necessarily the one
    // that ends up in the tree
    return getValue();
  }
  // Cached PSI for the stub's value text; soft reference so it can be collected
  private volatile Reference<PsiAnnotationMemberValue> myDetachedValue;
  @Override
  @Nullable
  public PsiAnnotationMemberValue getDetachedValue() {
    PsiNameValuePairStub stub = getStub();
    if (stub != null) {
      String text = stub.getValue();
      PsiAnnotationMemberValue result = SoftReference.dereference(myDetachedValue);
      if (result == null) {
        // Parse the stub's value text inside a synthetic, read-only annotation
        PsiAnnotation anno = JavaPsiFacade.getElementFactory(getProject()).createAnnotationFromText("@F(" + text + ")", this);
        ((LightVirtualFile)anno.getContainingFile().getViewProvider().getVirtualFile()).setWritable(false);
        myDetachedValue = new SoftReference<>(result = anno.findAttributeValue(null));
      }
      return result;
    }
    return getValue();
  }
  @Override
  public void subtreeChanged() {
    // The cached detached value may be stale after any subtree change
    myDetachedValue = null;
    super.subtreeChanged();
  }
  @Override
  public PsiReference getReference() {
    // The reference resolves the attribute name to the corresponding method
    // of the annotation type (e.g. "value" -> @interface's value() method)
    return new PsiReference() {
      @Nullable
      private PsiClass getReferencedClass() {
        LOG.assertTrue(getParent() instanceof PsiAnnotationParameterList && getParent().getParent() instanceof PsiAnnotation);
        PsiAnnotation annotation = (PsiAnnotation)getParent().getParent();
        PsiJavaCodeReferenceElement nameRef = annotation.getNameReferenceElement();
        if (nameRef == null) return null;
        PsiElement target = nameRef.resolve();
        return target instanceof PsiClass ? (PsiClass)target : null;
      }
      @NotNull
      @Override
      public PsiElement getElement() {
        return PsiNameValuePairImpl.this;
      }
      @NotNull
      @Override
      public TextRange getRangeInElement() {
        PsiIdentifier id = getNameIdentifier();
        if (id != null) {
          return new TextRange(0, id.getTextLength());
        }
        // No explicit name (e.g. @Anno(value)) -> empty range
        return TextRange.EMPTY_RANGE;
      }
      @Override
      public PsiElement resolve() {
        PsiClass refClass = getReferencedClass();
        if (refClass == null) return null;
        String name = getName();
        // An omitted name refers to the implicit "value" attribute
        if (name == null) name = PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME;
        MethodSignature signature = MethodSignatureUtil
          .createMethodSignature(name, PsiType.EMPTY_ARRAY, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
        return MethodSignatureUtil.findMethodBySignature(refClass, signature, false);
      }
      @Override
      @NotNull
      public String getCanonicalText() {
        String name = getName();
        return name != null ? name : PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME;
      }
      @Override
      public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException {
        PsiIdentifier nameIdentifier = getNameIdentifier();
        if (nameIdentifier != null) {
          PsiImplUtil.setName(nameIdentifier, newElementName);
        }
        else if (ElementType.ANNOTATION_MEMBER_VALUE_BIT_SET.contains(getNode().getFirstChildNode().getElementType())) {
          // Pair had no explicit name; insert "newElementName =" before the value
          PsiElementFactory factory = JavaPsiFacade.getElementFactory(getProject());
          nameIdentifier = factory.createIdentifier(newElementName);
          addBefore(nameIdentifier, SourceTreeToPsiMap.treeElementToPsi(getNode().getFirstChildNode()));
        }
        return PsiNameValuePairImpl.this;
      }
      @Override
      public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException {
        throw new IncorrectOperationException("Not implemented");
      }
      @Override
      public boolean isReferenceTo(@NotNull PsiElement element) {
        return element instanceof PsiMethod && element.equals(resolve());
      }
      @Override
      public boolean isSoft() {
        return false;
      }
    };
  }
  @Override
  public final void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof JavaElementVisitor) {
      ((JavaElementVisitor)visitor).visitNameValuePair(this);
    }
    else {
      visitor.visitElement(this);
    }
  }
  @Override
  public String toString() {
    return "PsiNameValuePair";
  }
  private static final Logger LOG = Logger.getInstance(PsiNameValuePairImpl.class);
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer.ppr;
import java.util.AbstractSequentialList;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.StrictChecks;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.PrunerUtils;
import org.apache.hadoop.hive.ql.optimizer.Transform;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
/**
* The transformation step that does partition pruning.
*
*/
public class PartitionPruner extends Transform {
// The log
public static final String CLASS_NAME = PartitionPruner.class.getName();
public static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
/*
* (non-Javadoc)
*
* @see
* org.apache.hadoop.hive.ql.optimizer.Transform#transform(org.apache.hadoop
* .hive.ql.parse.ParseContext)
*/
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  // create the context for walking operators
  OpWalkerCtx opWalkerCtx = new OpWalkerCtx(pctx.getOpToPartPruner());
  /* Move logic to PrunerUtils.walkOperatorTree() so that it can be reused. */
  PrunerUtils.walkOperatorTree(pctx, opWalkerCtx, OpProcFactory.getFilterProc(),
      OpProcFactory.getDefaultProc());
  return pctx;
}
/**
 * Find out whether the condition only contains partitioned columns. Note that
 * if the table is not partitioned, the function always returns true.
 *
 * @param tab
 *          the table object
 * @param expr
 *          the pruner expression for the table
 * @return true if the expression references only partition columns (or the table is not partitioned)
 */
public static boolean onlyContainsPartnCols(Table tab, ExprNodeDesc expr) {
  // An unpartitioned table (or a missing expression) trivially satisfies the check.
  if (!tab.isPartitioned() || expr == null) {
    return true;
  }
  // A bare column reference qualifies only if it names a partition key.
  if (expr instanceof ExprNodeColumnDesc) {
    return tab.isPartitionKey(((ExprNodeColumnDesc) expr).getColumn());
  }
  // It cannot contain a function that is not consistent within the query.
  if (expr instanceof ExprNodeGenericFuncDesc) {
    GenericUDF udf = ((ExprNodeGenericFuncDesc) expr).getGenericUDF();
    if (!FunctionRegistry.isConsistentWithinQuery(udf)) {
      return false;
    }
  }
  // Recurse: every child must itself reference only partition columns.
  List<ExprNodeDesc> children = expr.getChildren();
  if (children == null) {
    return true;
  }
  for (ExprNodeDesc child : children) {
    if (!onlyContainsPartnCols(tab, child)) {
      return false;
    }
  }
  return true;
}
/**
 * Get the partition list for the TS operator that satisfies the partition pruner
 * condition.
 *
 * @param ts the table scan operator being pruned
 * @param parseCtx the parse context holding the pruner expressions and cache
 * @param alias the table alias, used for error messages only
 * @return the pruned partition list for the scanned table
 * @throws SemanticException if pruning fails
 */
public static PrunedPartitionList prune(TableScanOperator ts, ParseContext parseCtx,
    String alias) throws SemanticException {
  final Table table = ts.getConf().getTableMetadata();
  final ExprNodeDesc prunerExpr = parseCtx.getOpToPartPruner().get(ts);
  return prune(table, prunerExpr, parseCtx.getConf(), alias, parseCtx.getPrunedPartitions());
}
/**
 * Get the partition list for the table that satisfies the partition pruner
 * condition.
 *
 * @param tab
 *          the table object for the alias
 * @param prunerExpr
 *          the pruner expression for the alias
 * @param conf
 *          for checking whether "strict" mode is on.
 * @param alias
 *          for generating error message only.
 * @param prunedPartitionsMap
 *          cached result for the table
 * @return the partition list for the table that satisfies the partition
 *         pruner condition.
 * @throws SemanticException
 */
public static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
    HiveConf conf, String alias, Map<String, PrunedPartitionList> prunedPartitionsMap)
        throws SemanticException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("Started pruning partition");
    LOG.trace("dbname = " + tab.getDbName());
    LOG.trace("tabname = " + tab.getTableName());
    LOG.trace("prune Expression = " + (prunerExpr == null ? "" : prunerExpr));
  }
  // Cache key: fully qualified table name plus (later) the compacted filter string
  String key = tab.getFullyQualifiedName() + ";";
  if (!tab.isPartitioned()) {
    // If the table is not partitioned, return empty list.
    return getAllPartsFromCacheOrServer(tab, key, false, prunedPartitionsMap);
  }
  if (!hasColumnExpr(prunerExpr)) {
    // If the "strict" mode is on, we have to provide partition pruner for each table.
    String error = StrictChecks.checkNoPartitionFilter(conf);
    if (error != null) {
      throw new SemanticException(error + " No partition predicate for Alias \""
          + alias + "\" Table \"" + tab.getTableName() + "\"");
    }
  }
  if (prunerExpr == null) {
    // In non-strict mode and there is no predicates at all - get everything.
    return getAllPartsFromCacheOrServer(tab, key, false, prunedPartitionsMap);
  }
  // Collects the partition column names actually referenced by the filter
  Set<String> partColsUsedInFilter = new LinkedHashSet<String>();
  // Replace virtual columns with nulls. See javadoc for details.
  prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab), partColsUsedInFilter);
  // Remove all parts that are not partition columns. See javadoc for details.
  ExprNodeDesc compactExpr = compactExpr(prunerExpr.clone());
  String oldFilter = prunerExpr.getExprString();
  if (compactExpr == null || isBooleanExpr(compactExpr)) {
    if (isFalseExpr(compactExpr)) {
      // Constant-false filter: no partition can match
      return new PrunedPartitionList(tab, key + compactExpr.getExprString(),
          new LinkedHashSet<Partition>(0), new ArrayList<String>(0), false);
    }
    // For null and true values, return every partition
    return getAllPartsFromCacheOrServer(tab, key, true, prunedPartitionsMap);
  }
  String compactExprString = compactExpr.getExprString();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Filter w/ compacting: " + compactExprString
        + "; filter w/o compacting: " + oldFilter);
  }
  key = key + compactExprString;
  // Return the cached result for this (table, filter) pair when present
  PrunedPartitionList ppList = prunedPartitionsMap.get(key);
  if (ppList != null) {
    return ppList;
  }
  // The final flag tells the server whether compacting changed the filter
  ppList = getPartitionsFromServer(tab, key, (ExprNodeGenericFuncDesc)compactExpr,
      conf, alias, partColsUsedInFilter, oldFilter.equals(compactExpr.getExprString()));
  prunedPartitionsMap.put(key, ppList);
  return ppList;
}
/**
 * Returns the full (unpruned) partition list for a table, consulting the supplied
 * cache first and populating it on a miss.
 *
 * @param tab table whose partitions are requested.
 * @param key cache key for the result.
 * @param unknownPartitions value to record as the "has unknown partitions" flag.
 * @param partsCache cache of previously computed lists; may be null to bypass caching.
 * @throws SemanticException if fetching partitions from the metastore fails.
 */
private static PrunedPartitionList getAllPartsFromCacheOrServer(Table tab, String key, boolean unknownPartitions,
    Map<String, PrunedPartitionList> partsCache) throws SemanticException {
  if (partsCache != null) {
    PrunedPartitionList cached = partsCache.get(key);
    if (cached != null) {
      return cached;
    }
  }
  Set<Partition> allParts;
  try {
    allParts = getAllPartitions(tab);
  } catch (HiveException e) {
    // Surface metastore failures as semantic-analysis failures.
    throw new SemanticException(e);
  }
  PrunedPartitionList result = new PrunedPartitionList(tab, key, allParts, null, unknownPartitions);
  if (partsCache != null) {
    partsCache.put(key, result);
  }
  return result;
}
/**
 * Returns true iff {@code expr} is a constant expression whose type is the
 * primitive boolean type. The {@code instanceof} test already rejects null,
 * so no separate null check is needed.
 */
static private boolean isBooleanExpr(ExprNodeDesc expr) {
  return expr instanceof ExprNodeConstantDesc &&
      ((ExprNodeConstantDesc)expr).getTypeInfo() instanceof PrimitiveTypeInfo &&
      ((PrimitiveTypeInfo)(((ExprNodeConstantDesc)expr).getTypeInfo())).
      getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME);
}
/**
 * Returns true iff {@code expr} is the constant boolean TRUE.
 * {@code Boolean.TRUE.equals(...)} is null-safe, replacing the explicit
 * {@code getValue() != null} guard and one extra getValue() call.
 */
static private boolean isTrueExpr(ExprNodeDesc expr) {
  return isBooleanExpr(expr) &&
      Boolean.TRUE.equals(((ExprNodeConstantDesc)expr).getValue());
}
/**
 * Returns true iff {@code expr} is the constant boolean FALSE.
 * {@code Boolean.FALSE.equals(...)} is null-safe, replacing the explicit
 * {@code getValue() != null} guard and one extra getValue() call.
 */
static private boolean isFalseExpr(ExprNodeDesc expr) {
  return isBooleanExpr(expr) &&
      Boolean.FALSE.equals(((ExprNodeConstantDesc)expr).getValue());
}
/**
 * Taking a partition pruning expression, remove the null operands and non-partition columns.
 * The reason why there are null operands is ExprProcFactory classes, for example
 * PPRColumnExprProcessor, replace non-partition sub-expressions with null constants.
 * Folding rules: AND drops TRUE/null children and short-circuits to FALSE on any FALSE
 * child; OR short-circuits to TRUE on any TRUE child and collapses to null (unknown)
 * if any child is null.
 * @param expr original partition pruning expression.
 * @return partition pruning expression that only contains partition columns, a constant
 *         TRUE/FALSE, or null when nothing can be decided from partition columns alone.
 */
@VisibleForTesting
static ExprNodeDesc compactExpr(ExprNodeDesc expr) {
// If this is a constant boolean expression, return the value.
if (expr == null) {
return null;
}
if (expr instanceof ExprNodeConstantDesc) {
// A null-valued constant marks a removed non-partition sub-expression.
if (((ExprNodeConstantDesc)expr).getValue() == null) {
return null;
}
// Only boolean constants are legal here; anything else indicates a planner bug.
if (!isBooleanExpr(expr)) {
throw new IllegalStateException("Unexpected non-boolean ExprNodeConstantDesc: "
+ expr.getExprString());
}
return expr;
} else if (expr instanceof ExprNodeColumnDesc) {
// A bare partition-column reference is kept as-is.
return expr;
} else if (expr instanceof ExprNodeGenericFuncDesc) {
GenericUDF udf = ((ExprNodeGenericFuncDesc)expr).getGenericUDF();
boolean isAnd = udf instanceof GenericUDFOPAnd;
boolean isOr = udf instanceof GenericUDFOPOr;
List<ExprNodeDesc> children = expr.getChildren();
if (isAnd) {
// Non-partition expressions are converted to nulls.
List<ExprNodeDesc> newChildren = new ArrayList<ExprNodeDesc>();
boolean allTrue = true;
for (ExprNodeDesc child : children) {
ExprNodeDesc compactChild = compactExpr(child);
if (compactChild != null) {
// (x AND TRUE) == x, so TRUE children are simply dropped.
if (!isTrueExpr(compactChild)) {
newChildren.add(compactChild);
allTrue = false;
}
// A single FALSE child makes the whole conjunction FALSE.
if (isFalseExpr(compactChild)) {
return new ExprNodeConstantDesc(Boolean.FALSE);
}
} else {
// Unknown child: conjunction can no longer be proven TRUE.
allTrue = false;
}
}
if (allTrue) {
return new ExprNodeConstantDesc(Boolean.TRUE);
}
if (newChildren.size() == 0) {
// Everything was unknown.
return null;
}
if (newChildren.size() == 1) {
return newChildren.get(0);
}
// Nothing to compact, update expr with compacted children.
((ExprNodeGenericFuncDesc) expr).setChildren(newChildren);
} else if (isOr) {
// Non-partition expressions are converted to nulls.
List<ExprNodeDesc> newChildren = new ArrayList<ExprNodeDesc>();
boolean allFalse = true;
boolean isNull = false;
for (ExprNodeDesc child : children) {
ExprNodeDesc compactChild = compactExpr(child);
if (compactChild != null) {
// A single TRUE child makes the whole disjunction TRUE.
if (isTrueExpr(compactChild)) {
return new ExprNodeConstantDesc(Boolean.TRUE);
}
// (x OR FALSE) == x, so FALSE children are dropped. Once a null child
// has been seen the collected children are irrelevant (result is null).
if (!isNull && !isFalseExpr(compactChild)) {
newChildren.add(compactChild);
allFalse = false;
}
} else {
// Unknown child: OR with unknown cannot be decided (unless a TRUE appears).
isNull = true;
}
}
if (isNull) {
return null;
}
if (allFalse) {
return new ExprNodeConstantDesc(Boolean.FALSE);
}
if (newChildren.size() == 1) {
return newChildren.get(0);
}
// Nothing to compact, update expr with compacted children.
((ExprNodeGenericFuncDesc) expr).setChildren(newChildren);
}
// Non-AND/OR functions (comparisons etc.) are kept unchanged.
return expr;
} else {
throw new IllegalStateException("Unexpected type of ExprNodeDesc: " + expr.getExprString());
}
}
/**
 * See compactExpr. Some things in the expr are replaced with nulls for pruner, however
 * the virtual columns are not removed (ExprNodeColumnDesc cannot tell them apart from
 * partition columns), so we do it here.
 * The expression is only used to prune by partition name, so we have no business with VCs.
 * Mutates the expression tree in place (children are replaced).
 * @param expr original partition pruning expression.
 * @param partCols list of partition columns for the table.
 * @param referred partition columns referred by expr; populated as a side effect.
 * @return partition pruning expression that only contains partition columns from the list.
 */
static private ExprNodeDesc removeNonPartCols(ExprNodeDesc expr, List<String> partCols,
Set<String> referred) {
if (expr instanceof ExprNodeFieldDesc) {
// Column is not a partition column for the table,
// as we do not allow partitions based on complex
// list or struct fields.
return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
}
else if (expr instanceof ExprNodeColumnDesc) {
String column = ((ExprNodeColumnDesc) expr).getColumn();
if (!partCols.contains(column)) {
// Column doesn't appear to be a partition column for the table.
return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
}
// Record that this partition column participates in the filter.
referred.add(column);
}
else if (expr instanceof ExprNodeGenericFuncDesc) {
List<ExprNodeDesc> children = expr.getChildren();
for (int i = 0; i < children.size(); ++i) {
ExprNodeDesc other = removeNonPartCols(children.get(i), partCols, referred);
if (ExprNodeDescUtils.isNullConstant(other)) {
if (FunctionRegistry.isOpAnd(expr)) {
// partcol=... AND nonpartcol=... is replaced with partcol=... AND TRUE
// which will be folded to partcol=...
// This cannot be done also for OR
Preconditions.checkArgument(expr.getTypeInfo().accept(TypeInfoFactory.booleanTypeInfo));
other = new ExprNodeConstantDesc(expr.getTypeInfo(), true);
} else {
// Functions like NVL, COALESCE, CASE can change a
// NULL introduced by a nonpart column removal into a non-null
// and cause overaggressive prunning, missing data (incorrect result)
return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
}
}
// In-place replacement of the child keeps the rest of the tree intact.
children.set(i, other);
}
}
return expr;
}
/**
 * @param expr Expression to inspect.
 * @return True iff the expression tree contains any non-built-in (user-defined) function.
 */
static private boolean hasUserFunctions(ExprNodeDesc expr) {
  // Only generic-function nodes can reference a UDF; leaves never do.
  if (!(expr instanceof ExprNodeGenericFuncDesc)) {
    return false;
  }
  if (!FunctionRegistry.isBuiltInFuncExpr((ExprNodeGenericFuncDesc) expr)) {
    return true;
  }
  // Recurse into the operands of a built-in function.
  boolean found = false;
  for (ExprNodeDesc operand : expr.getChildren()) {
    found = hasUserFunctions(operand);
    if (found) {
      break;
    }
  }
  return found;
}
/**
 * Prunes partitions by pushing the compacted filter to the metastore when possible,
 * falling back to client-side evaluation when the filter contains user-defined
 * functions or the metastore does not support expression pushdown.
 *
 * @param tab table to prune.
 * @param key cache key recorded in the resulting list.
 * @param compactExpr compacted partition-column-only filter.
 * @param conf Hive configuration.
 * @param alias table alias (unused here beyond the signature; kept for callers).
 * @param partColsUsedInFilter partition columns referenced by the original filter.
 * @param isPruningByExactFilter true when the compacted filter equals the original one,
 *        i.e. pruning is exact and partitions are fully "known".
 */
private static PrunedPartitionList getPartitionsFromServer(Table tab, final String key, final ExprNodeGenericFuncDesc compactExpr,
HiveConf conf, String alias, Set<String> partColsUsedInFilter, boolean isPruningByExactFilter) throws SemanticException {
try {
// Finally, check the filter for non-built-in UDFs. If these are present, we cannot
// do filtering on the server, and have to fall back to client path.
boolean doEvalClientSide = hasUserFunctions(compactExpr);
// Now filter.
List<Partition> partitions = new ArrayList<Partition>();
boolean hasUnknownPartitions = false;
PerfLogger perfLogger = SessionState.getPerfLogger();
if (!doEvalClientSide) {
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
try {
hasUnknownPartitions = Hive.get().getPartitionsByExpr(
tab, compactExpr, conf, partitions);
} catch (IMetaStoreClient.IncompatibleMetastoreException ime) {
// TODO: backward compat for Hive <= 0.12. Can be removed later.
LOG.warn("Metastore doesn't support getPartitionsByExpr", ime);
doEvalClientSide = true;
} finally {
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
}
}
if (doEvalClientSide) {
// Either we have user functions, or metastore is old version - filter names locally.
hasUnknownPartitions = pruneBySequentialScan(tab, partitions, compactExpr, conf);
}
// The partitions are "unknown" if the call says so due to the expression
// evaluator returning null for a partition, or if we sent a partial expression to
// metastore and so some partitions may have no data based on other filters.
return new PrunedPartitionList(tab, key,
new LinkedHashSet<Partition>(partitions),
new ArrayList<String>(partColsUsedInFilter),
hasUnknownPartitions || !isPruningByExactFilter);
} catch (SemanticException e) {
// Already the right exception type - rethrow without wrapping.
throw e;
} catch (Exception e) {
throw new SemanticException(e);
}
}
/**
 * Fetches every partition of the table from the metastore, timing the call
 * with the session PerfLogger.
 *
 * Fix: the original called PerfLogEnd after the metastore call, so a thrown
 * HiveException left the perf-log scope open. The end call now lives in a
 * finally block so begin/end are always balanced.
 *
 * @param tab table whose partitions are fetched.
 * @throws HiveException on metastore failure.
 */
private static Set<Partition> getAllPartitions(Table tab) throws HiveException {
  PerfLogger perfLogger = SessionState.getPerfLogger();
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
  try {
    return Hive.get().getAllPartitionsOf(tab);
  } finally {
    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
  }
}
/**
 * Pruning partition by getting the partition names first and pruning using Hive expression
 * evaluator on client.
 * @param tab the table containing the partitions.
 * @param partitions the resulting partitions; matching partitions are added to this list.
 * @param prunerExpr the SQL predicate that involves partition columns.
 * @param conf Hive Configuration object, can not be NULL.
 * @return true iff the partition pruning expression contains non-partition columns
 *         (i.e. some retained partitions are "unknown" matches).
 */
static private boolean pruneBySequentialScan(Table tab, List<Partition> partitions,
ExprNodeGenericFuncDesc prunerExpr, HiveConf conf) throws HiveException, MetaException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PRUNE_LISTING);
// (short) -1 requests all partition names, without a limit.
List<String> partNames = Hive.get().getPartitionNames(
tab.getDbName(), tab.getTableName(), (short) -1);
String defaultPartitionName = conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
List<String> partCols = extractPartColNames(tab);
List<PrimitiveTypeInfo> partColTypeInfos = extractPartColTypes(tab);
// Filters partNames in place; only surviving names are fetched below.
boolean hasUnknownPartitions = prunePartitionNames(
partCols, partColTypeInfos, prunerExpr, defaultPartitionName, partNames);
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PRUNE_LISTING);
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
if (!partNames.isEmpty()) {
partitions.addAll(Hive.get().getPartitionsByNames(tab, partNames));
}
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
return hasUnknownPartitions;
}
/** Returns the names of the table's partition columns, in declaration order. */
private static List<String> extractPartColNames(Table tab) {
  List<String> names = new ArrayList<String>(tab.getPartCols().size());
  for (FieldSchema field : tab.getPartCols()) {
    names.add(field.getName());
  }
  return names;
}
/** Returns the primitive type info of each partition column, in declaration order. */
private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  List<PrimitiveTypeInfo> types = new ArrayList<PrimitiveTypeInfo>(tab.getPartCols().size());
  for (FieldSchema field : tab.getPartCols()) {
    types.add(TypeInfoFactory.getPrimitiveTypeInfo(field.getType()));
  }
  return types;
}
/**
 * Prunes partition names to see if they match the prune expression.
 * @param partColumnNames name of partition columns
 * @param partColumnTypeInfos types of partition columns
 * @param prunerExpr The expression to match.
 * @param defaultPartitionName name of default partition
 * @param partNames Partition names to filter. The list is modified in place.
 * @return Whether the list has any partitions for which the expression may or may not match.
 */
public static boolean prunePartitionNames(List<String> partColumnNames,
List<PrimitiveTypeInfo> partColumnTypeInfos, ExprNodeGenericFuncDesc prunerExpr,
String defaultPartitionName, List<String> partNames) throws HiveException, MetaException {
// Prepare the expression to filter on the columns.
ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
PartExprEvalUtils.prepareExpr(prunerExpr, partColumnNames, partColumnTypeInfos);
// Filter the name list. Removing elements one by one can be slow on e.g. ArrayList,
// so let's create a new list and copy it if we don't have a linked list
boolean inPlace = partNames instanceof AbstractSequentialList<?>;
List<String> partNamesSeq = inPlace ? partNames : new LinkedList<String>(partNames);
// Array for the values to pass to evaluator.
ArrayList<String> values = new ArrayList<String>(partColumnNames.size());
for (int i = 0; i < partColumnNames.size(); ++i) {
values.add(null);
}
boolean hasUnknownPartitions = false;
Iterator<String> partIter = partNamesSeq.iterator();
while (partIter.hasNext()) {
String partName = partIter.next();
// Decomposes "col1=v1/col2=v2" into per-column string values (reuses 'values').
Warehouse.makeValsFromName(partName, values);
// Convert each string value to the partition column's declared primitive type.
ArrayList<Object> convertedValues = new ArrayList<Object>(values.size());
for(int i=0; i<values.size(); i++) {
String partitionValue = values.get(i);
PrimitiveTypeInfo typeInfo = partColumnTypeInfos.get(i);
// NOTE(review): assumes makeValsFromName never leaves a null element - confirm.
if (partitionValue.equals(defaultPartitionName)) {
convertedValues.add(null); // Null for default partition.
} else {
Object o = ObjectInspectorConverters.getConverter(
PrimitiveObjectInspectorFactory.javaStringObjectInspector,
PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo))
.convert(partitionValue);
convertedValues.add(o);
}
}
// Evaluate the expression tree. Null result means "unknown" (can't decide).
Boolean isNeeded = (Boolean)PartExprEvalUtils.evaluateExprOnPart(handle, convertedValues);
boolean isUnknown = (isNeeded == null);
if (!isUnknown && !isNeeded) {
partIter.remove();
continue;
}
if (isUnknown && values.contains(defaultPartitionName)) {
// Reject default partitions if we couldn't determine whether we should include it or not.
// Note that predicate would only contains partition column parts of original predicate.
if (LOG.isDebugEnabled()) {
LOG.debug("skipping default/bad partition: " + partName);
}
partIter.remove();
continue;
}
hasUnknownPartitions |= isUnknown;
if (LOG.isDebugEnabled()) {
LOG.debug("retained " + (isUnknown ? "unknown " : "") + "partition: " + partName);
}
}
// Copy the surviving names back when we worked on a temporary linked list.
if (!inPlace) {
partNames.clear();
partNames.addAll(partNamesSeq);
}
return hasUnknownPartitions;
}
/**
 * Whether the expression tree contains a column node or not.
 * @param desc root of the expression tree; may be null.
 * @return true iff any node in the tree is an ExprNodeColumnDesc.
 */
public static boolean hasColumnExpr(ExprNodeDesc desc) {
  // Null trees trivially contain no columns.
  if (desc == null) {
    return false;
  }
  // A column node itself.
  if (desc instanceof ExprNodeColumnDesc) {
    return true;
  }
  // Otherwise search the children, if any.
  List<ExprNodeDesc> kids = desc.getChildren();
  if (kids == null) {
    return false;
  }
  for (ExprNodeDesc kid : kids) {
    if (hasColumnExpr(kid)) {
      return true;
    }
  }
  return false;
}
}
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationActivationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.AppIconScheme;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.util.IconUtil;
import com.intellij.util.ui.ImageUtil;
import com.intellij.util.ui.UIUtil;
import org.apache.sanselan.ImageWriteException;
import org.apache.sanselan.common.BinaryConstants;
import org.apache.sanselan.common.BinaryOutputStream;
import org.jetbrains.annotations.NotNull;
import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.Area;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RoundRectangle2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public abstract class AppIcon {
private static final Logger LOG = Logger.getInstance(AppIcon.class);
private static AppIcon ourIcon;
@NotNull
public static AppIcon getInstance() {
  // Lazily pick the platform-specific implementation on first access.
  AppIcon icon = ourIcon;
  if (icon == null) {
    if (SystemInfo.isMac) {
      icon = new MacAppIcon();
    }
    else if (SystemInfo.isWin7OrNewer) {
      icon = new Win7AppIcon();
    }
    else {
      // No taskbar/dock integration available - all operations become no-ops.
      icon = new EmptyIcon();
    }
    ourIcon = icon;
  }
  return icon;
}
/** Shows/updates a progress indicator on the app icon; returns whether it was applied. */
public abstract boolean setProgress(Project project, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk);
/** Hides the progress indicator for the given process id; returns whether it was hidden. */
public abstract boolean hideProgress(Project project, Object processId);
/** Shows a small text badge (e.g. an error count) on the app icon; null text clears it. */
public abstract void setErrorBadge(Project project, String text);
/** Shows or hides the "OK" badge on the app icon. */
public abstract void setOkBadge(Project project, boolean visible);
/** Asks the OS to draw the user's attention to the application. */
public abstract void requestAttention(Project project, boolean critical);
/** Brings the given frame to the foreground, where the platform supports it. */
public abstract void requestFocus(IdeFrame frame);
/**
 * Common behavior for platform icons: gates every operation on registry flags and on
 * the application being inactive (badges/progress are pointless while focused), and
 * clears all indicators when the application becomes active again.
 */
private static abstract class BaseIcon extends AppIcon {
// Lazily-registered listener that resets indicators on app activation.
private ApplicationActivationListener myAppListener;
// Id of the process currently owning the progress bar, or null.
protected Object myCurrentProcessId;
// Last progress value shown, used to throttle redraws.
protected double myLastValue;
@Override
public final boolean setProgress(Project project, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
// Only show progress while inactive, and never steal the bar from another process id.
if (!isAppActive() && Registry.is("ide.appIcon.progress") && (myCurrentProcessId == null || myCurrentProcessId.equals(processId))) {
return _setProgress(getIdeFrame(project), processId, scheme, value, isOk);
}
else {
return false;
}
}
@Override
public final boolean hideProgress(Project project, Object processId) {
if (Registry.is("ide.appIcon.progress")) {
return _hideProgress(getIdeFrame(project), processId);
}
else {
return false;
}
}
@Override
public final void setErrorBadge(Project project, String text) {
if (!isAppActive() && Registry.is("ide.appIcon.badge")) {
// Error badge and OK badge are mutually exclusive.
_setOkBadge(getIdeFrame(project), false);
_setTextBadge(getIdeFrame(project), text);
}
}
@Override
public final void setOkBadge(Project project, boolean visible) {
if (!isAppActive() && Registry.is("ide.appIcon.badge")) {
// OK badge replaces any text badge.
_setTextBadge(getIdeFrame(project), null);
_setOkBadge(getIdeFrame(project), visible);
}
}
@Override
public final void requestAttention(Project project, boolean critical) {
if (!isAppActive() && Registry.is("ide.appIcon.requestAttention")) {
_requestAttention(getIdeFrame(project), critical);
}
}
// Platform-specific primitives implemented by subclasses.
public abstract boolean _setProgress(IdeFrame frame, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk);
public abstract boolean _hideProgress(IdeFrame frame, Object processId);
public abstract void _setTextBadge(IdeFrame frame, String text);
public abstract void _setOkBadge(IdeFrame frame, boolean visible);
public abstract void _requestAttention(IdeFrame frame, boolean critical);
protected abstract IdeFrame getIdeFrame(Project project);
// Side effect: on first call (once an Application exists) subscribes a listener
// that clears progress and badges whenever the application is activated.
private boolean isAppActive() {
Application app = ApplicationManager.getApplication();
if (app != null && myAppListener == null) {
myAppListener = new ApplicationActivationListener.Adapter() {
@Override
public void applicationActivated(IdeFrame ideFrame) {
hideProgress(ideFrame.getProject(), myCurrentProcessId);
_setOkBadge(ideFrame, false);
_setTextBadge(ideFrame, null);
}
};
app.getMessageBus().connect().subscribe(ApplicationActivationListener.TOPIC, myAppListener);
}
return app != null && app.isActive();
}
}
@SuppressWarnings("UseJBColor")
private static class MacAppIcon extends BaseIcon {
private BufferedImage myAppImage;
/**
 * Lazily fetches and caches the current macOS dock icon image through the
 * com.apple.eawt.Application reflection bridge.
 * Returns null when the eawt API is unavailable (NoSuchMethodException) or the
 * dock reports no image; other reflection failures are logged and may also
 * yield null on the first call.
 */
private BufferedImage getAppImage() {
assertIsDispatchThread();
try {
if (myAppImage != null) return myAppImage;
Object app = getApp();
Image appImage = (Image)getAppMethod("getDockIconImage").invoke(app);
if (appImage == null) return null;
myAppImage = ImageUtil.toBufferedImage(appImage);
}
catch (NoSuchMethodException e) {
// JDK build without getDockIconImage - treat as "no image available".
return null;
}
catch (Exception e) {
LOG.error(e);
}
return myAppImage;
}
@Override
public void _setTextBadge(IdeFrame frame, String text) {
  assertIsDispatchThread();
  try {
    // Delegate to com.apple.eawt.Application.setDockIconBadge via reflection.
    Method setBadge = getAppMethod("setDockIconBadge", String.class);
    setBadge.invoke(getApp(), text);
  }
  catch (NoSuchMethodException ignored) {
    // eawt API not present in this JDK - silently skip.
  }
  catch (Exception e) {
    LOG.error(e);
  }
}
@Override
public void requestFocus(IdeFrame frame) {
  assertIsDispatchThread();
  try {
    // Ask macOS to bring the whole application to the foreground.
    Method requestForeground = getAppMethod("requestForeground", boolean.class);
    requestForeground.invoke(getApp(), true);
  }
  catch (NoSuchMethodException ignored) {
    // eawt API not present in this JDK - silently skip.
  }
  catch (Exception e) {
    LOG.error(e);
  }
}
@Override
public void _requestAttention(IdeFrame frame, boolean critical) {
  assertIsDispatchThread();
  try {
    // Bounce the dock icon; 'critical' makes the bounce persistent.
    Method requestAttention = getAppMethod("requestUserAttention", boolean.class);
    requestAttention.invoke(getApp(), critical);
  }
  catch (NoSuchMethodException ignored) {
    // eawt API not present in this JDK - silently skip.
  }
  catch (Exception e) {
    LOG.error(e);
  }
}
@Override
protected IdeFrame getIdeFrame(Project project) {
// The macOS dock icon is application-wide, so no per-project frame is needed.
return null;
}
@Override
public boolean _hideProgress(IdeFrame frame, Object processId) {
  assertIsDispatchThread();
  // getAppImage() caches internally, so the repeated call is cheap.
  BufferedImage appImage = getAppImage();
  if (appImage == null) return false;
  // Refuse to clear a progress bar owned by a different process id.
  boolean ownedByOther = myCurrentProcessId != null && !myCurrentProcessId.equals(processId);
  if (ownedByOther) return false;
  // Restore the pristine dock icon and reset progress state.
  setDockIcon(appImage);
  myCurrentProcessId = null;
  myLastValue = 0;
  return true;
}
@Override
public void _setOkBadge(IdeFrame frame, boolean visible) {
assertIsDispatchThread();
if (getAppImage() == null) return;
AppImage img = createAppImage();
if (visible) {
Icon okIcon = AllIcons.Mac.AppIconOk512;
int myImgWidth = img.myImg.getWidth();
if (myImgWidth != 128) {
// NOTE(review): integer division - any width below 128 scales by 0 and widths
// that are not multiples of 128 truncate; confirm whether a float ratio was intended.
okIcon = IconUtil.scale(okIcon, myImgWidth / 128);
}
// Paint the badge in the top-right corner of the dock image.
int x = myImgWidth - okIcon.getIconWidth();
int y = 0;
okIcon.paintIcon(JOptionPane.getRootFrame(), img.myG2d, x, y);
}
// Painting nothing when !visible effectively clears the badge.
setDockIcon(img.myImg);
}
// white 80% transparent
private static Color PROGRESS_BACKGROUND_COLOR = new Color(255, 255, 255, 217);
private static Color PROGRESS_OUTLINE_COLOR = new Color(140, 139, 140);
@Override
public boolean _setProgress(IdeFrame frame, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
assertIsDispatchThread();
if (getAppImage() == null) return false;
myCurrentProcessId = processId;
// Ignore regressions and deltas below 2% to avoid needless dock redraws.
if (myLastValue > value) return true;
if (Math.abs(myLastValue - value) < 0.02d) return true;
try {
// Bar geometry is proportional to the dock image: 13% tall, inset 5%/15%.
double progressHeight = (myAppImage.getHeight() * 0.13);
double xInset = (myAppImage.getWidth() * 0.05);
double yInset = (myAppImage.getHeight() * 0.15);
final double width = myAppImage.getWidth() - xInset * 2;
final double y = myAppImage.getHeight() - progressHeight - yInset;
// Rounded outline one pixel larger than the bar on every side.
Area borderArea = new Area( new RoundRectangle2D.Double(
xInset - 1, y - 1, width + 2, progressHeight + 2,
(progressHeight + 2), (progressHeight + 2
)));
Area backgroundArea = new Area(new Rectangle2D.Double(xInset, y, width, progressHeight));
backgroundArea.intersect(borderArea);
// Filled portion is proportional to 'value' (0..1).
Area progressArea = new Area(new Rectangle2D.Double(xInset + 1, y + 1,(width - 2) * value, progressHeight - 1));
progressArea.intersect(borderArea);
AppImage appImg = createAppImage();
appImg.myG2d.setColor(PROGRESS_BACKGROUND_COLOR);
appImg.myG2d.fill(backgroundArea);
final Color color = isOk ? scheme.getOkColor() : scheme.getErrorColor();
appImg.myG2d.setColor(color);
appImg.myG2d.fill(progressArea);
appImg.myG2d.setColor(PROGRESS_OUTLINE_COLOR);
appImg.myG2d.draw(backgroundArea);
appImg.myG2d.draw(borderArea);
setDockIcon(appImg.myImg);
myLastValue = value;
}
catch (Exception e) {
LOG.error(e);
}
finally {
// NOTE(review): the process id is dropped after every update, so ownership is
// not retained between calls - confirm this is intentional.
myCurrentProcessId = null;
}
return true;
}
/** Copies the cached dock image into a fresh ARGB buffer ready for overlay drawing. */
private AppImage createAppImage() {
  BufferedImage base = getAppImage();
  assert base != null;
  @SuppressWarnings("UndesirableClassUsage")
  BufferedImage copy = new BufferedImage(base.getWidth(), base.getHeight(), BufferedImage.TYPE_INT_ARGB);
  Graphics2D g2d = copy.createGraphics();
  g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
  UIUtil.drawImage(g2d, base, 0, 0, null);
  return new AppImage(copy, g2d);
}
/** Pair of a dock image copy and the graphics context used to draw overlays on it. */
private static class AppImage {
BufferedImage myImg;
Graphics2D myG2d;
AppImage(BufferedImage img, Graphics2D g2d) {
myImg = img;
myG2d = g2d;
}
}
/** Pushes the given image to the macOS dock via the eawt reflection bridge. */
private static void setDockIcon(BufferedImage image) {
  try {
    Method setImage = getAppMethod("setDockIconImage", Image.class);
    setImage.invoke(getApp(), image);
  }
  catch (Exception e) {
    LOG.error(e);
  }
}
/** Resolves a method on com.apple.eawt.Application by name and parameter types. */
private static Method getAppMethod(final String name, Class... args) throws NoSuchMethodException, ClassNotFoundException {
return getAppClass().getMethod(name, args);
}
/** Returns the shared com.apple.eawt.Application instance via its static factory. */
private static Object getApp() throws NoSuchMethodException, ClassNotFoundException, InvocationTargetException, IllegalAccessException {
return getAppClass().getMethod("getApplication").invoke(null);
}
/** Loads the Apple eawt Application class; throws if not running on an Apple JDK. */
private static Class<?> getAppClass() throws ClassNotFoundException {
return Class.forName("com.apple.eawt.Application");
}
}
@SuppressWarnings("UseJBColor")
private static class Win7AppIcon extends BaseIcon {
@Override
public boolean _setProgress(IdeFrame frame, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
  myCurrentProcessId = processId;
  // Skip redraws for changes below 2% to keep taskbar updates cheap.
  boolean negligibleChange = Math.abs(myLastValue - value) < 0.02d;
  if (negligibleChange) {
    return true;
  }
  try {
    if (isValid(frame)) {
      Win7TaskBar.setProgress(frame, value, isOk);
    }
  }
  catch (Throwable e) {
    LOG.error(e);
  }
  myLastValue = value;
  myCurrentProcessId = null;
  return true;
}
@Override
public boolean _hideProgress(IdeFrame frame, Object processId) {
  // Refuse to clear a progress bar owned by a different process id.
  boolean ownedByOther = myCurrentProcessId != null && !myCurrentProcessId.equals(processId);
  if (ownedByOther) {
    return false;
  }
  try {
    if (isValid(frame)) {
      Win7TaskBar.hideProgress(frame);
    }
  }
  catch (Throwable e) {
    LOG.error(e);
  }
  myCurrentProcessId = null;
  myLastValue = 0;
  return true;
}
/**
 * Serializes an ARGB image as a single-entry Windows .ico file (32bpp BMP payload
 * plus a 1bpp AND transparency mask), writing little-endian via Sanselan's
 * BinaryOutputStream. Layout: ICONDIR, one ICONDIRENTRY, BITMAPINFOHEADER,
 * bottom-up XOR bitmap, bottom-up AND mask.
 *
 * @param src source image; must be TYPE_INT_ARGB or TYPE_4BYTE_ABGR.
 * @param os destination stream; closed by this method.
 */
private static void writeTransparentIcoImageWithSanselan(BufferedImage src, OutputStream os)
throws ImageWriteException, IOException {
LOG.assertTrue(BufferedImage.TYPE_INT_ARGB == src.getType() || BufferedImage.TYPE_4BYTE_ABGR == src.getType());
int bitCount = 32;
BinaryOutputStream bos = new BinaryOutputStream(os, BinaryConstants.BYTE_ORDER_INTEL);
try {
// Scanlines of both the color bitmap and the mask are padded to 4-byte multiples.
int scanline_size = (bitCount * src.getWidth() + 7) / 8;
if ((scanline_size % 4) != 0)
scanline_size += 4 - (scanline_size % 4); // pad scanline to 4 byte size.
int t_scanline_size = (src.getWidth() + 7) / 8;
if ((t_scanline_size % 4) != 0)
t_scanline_size += 4 - (t_scanline_size % 4); // pad scanline to 4 byte size.
// 40 = BITMAPINFOHEADER size; color bitmap + 1bpp AND mask follow.
int imageSize = 40 + src.getHeight() * scanline_size + src.getHeight() * t_scanline_size;
// ICONDIR
bos.write2Bytes(0); // reserved
bos.write2Bytes(1); // 1=ICO, 2=CUR
bos.write2Bytes(1); // count
// ICONDIRENTRY
// Width/height fields are single bytes; 0 conventionally means 256+.
int iconDirEntryWidth = src.getWidth();
int iconDirEntryHeight = src.getHeight();
if (iconDirEntryWidth > 255 || iconDirEntryHeight > 255) {
iconDirEntryWidth = 0;
iconDirEntryHeight = 0;
}
bos.write(iconDirEntryWidth);
bos.write(iconDirEntryHeight);
bos.write(0);
bos.write(0); // reserved
bos.write2Bytes(1); // color planes
bos.write2Bytes(bitCount);
bos.write4Bytes(imageSize);
bos.write4Bytes(22); // image offset
// BITMAPINFOHEADER
bos.write4Bytes(40); // size
bos.write4Bytes(src.getWidth());
// Height is doubled because it covers both the XOR bitmap and the AND mask.
bos.write4Bytes(2 * src.getHeight());
bos.write2Bytes(1); // planes
bos.write2Bytes(bitCount);
bos.write4Bytes(0); // compression
bos.write4Bytes(0); // image size
bos.write4Bytes(0); // x pixels per meter
bos.write4Bytes(0); // y pixels per meter
bos.write4Bytes(0); // colors used, 0 = (1 << bitCount) (ignored)
bos.write4Bytes(0); // colors important
int bit_cache = 0;
int bits_in_cache = 0;
int row_padding = scanline_size - (bitCount * src.getWidth() + 7) / 8;
// XOR bitmap: BGRA byte order, rows written bottom-up per BMP convention.
for (int y = src.getHeight() - 1; y >= 0; y--) {
for (int x = 0; x < src.getWidth(); x++) {
int argb = src.getRGB(x, y);
bos.write(0xff & argb);
bos.write(0xff & (argb >> 8));
bos.write(0xff & (argb >> 16));
bos.write(0xff & (argb >> 24));
}
for (int x = 0; x < row_padding; x++)
bos.write(0);
}
int t_row_padding = t_scanline_size - (src.getWidth() + 7) / 8;
// AND mask: one bit per pixel, set (1) only for fully transparent pixels.
for (int y = src.getHeight() - 1; y >= 0; y--) {
for (int x = 0; x < src.getWidth(); x++) {
int argb = src.getRGB(x, y);
int alpha = 0xff & (argb >> 24);
bit_cache <<= 1;
if (alpha == 0)
bit_cache |= 1;
bits_in_cache++;
if (bits_in_cache >= 8) {
bos.write(0xff & bit_cache);
bit_cache = 0;
bits_in_cache = 0;
}
}
// Flush a partially-filled byte at the end of each row, left-aligned.
if (bits_in_cache > 0) {
bit_cache <<= (8 - bits_in_cache);
bos.write(0xff & bit_cache);
bit_cache = 0;
bits_in_cache = 0;
}
for (int x = 0; x < t_row_padding; x++)
bos.write(0);
}
}
finally {
try {
bos.close();
} catch (IOException ignored) { }
}
}
// Badge palette: soft black shadow, red disc, darkened text backdrop.
private static Color errorBadgeShadowColor = new Color(0,0,0,102);
private static Color errorBadgeMainColor = new Color(255,98,89);
private static Color errorBadgeTextBackgroundColor = new Color(0,0,0,39);
/**
 * Renders a 16x16 red round badge with the given text, converts it to an .ico
 * in memory, and installs it as the taskbar overlay icon. Null text clears the overlay.
 */
@Override
public void _setTextBadge(IdeFrame frame, String text) {
if (!isValid(frame)) {
return;
}
Object icon = null;
if (text != null) {
try {
int size = 16;
BufferedImage image = UIUtil.createImage(size, size, BufferedImage.TYPE_INT_ARGB);
Graphics2D g = image.createGraphics();
int shadowRadius = 16;
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
// Shadow disc first, then the slightly smaller main disc on top.
g.setPaint(errorBadgeShadowColor);
g.fillRoundRect(size / 2 - shadowRadius / 2, size / 2 - shadowRadius / 2, shadowRadius, shadowRadius, size, size);
int mainRadius = 14;
g.setPaint(errorBadgeMainColor);
g.fillRoundRect(size / 2 - mainRadius / 2, size / 2 - mainRadius / 2, mainRadius, mainRadius, size, size);
Font font = g.getFont();
g.setFont(new Font(font.getName(), Font.BOLD, 9));
FontMetrics fontMetrics = g.getFontMetrics();
// Center the text; the oval behind it slightly darkens the disc for contrast.
int textWidth = fontMetrics.stringWidth(text);
int textHeight = UIUtil.getHighestGlyphHeight(text, font, g);
g.setPaint(errorBadgeTextBackgroundColor);
g.fillOval( size / 2 - textWidth / 2, size / 2 - textHeight / 2, textWidth, textHeight);
g.setColor(Color.white);
g.drawString(text, size / 2 - textWidth / 2, size / 2 - fontMetrics.getHeight() / 2 + fontMetrics.getAscent());
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
writeTransparentIcoImageWithSanselan(image, bytes);
icon = Win7TaskBar.createIcon(bytes.toByteArray());
}
catch (Throwable e) {
LOG.error(e);
}
}
try {
Win7TaskBar.setOverlayIcon(frame, icon, icon != null);
}
catch (Throwable e) {
LOG.error(e);
}
}
// Lazily-built, cached native handle for the OK overlay icon.
private Object myOkIcon;
/**
 * Shows or clears the cached "OK" overlay icon on the taskbar button.
 * The icon is built once from the bundled PNG and reused for the lifetime
 * of this instance.
 */
@Override
public void _setOkBadge(IdeFrame frame, boolean visible) {
if (!isValid(frame)) {
return;
}
Object icon = null;
if (visible) {
synchronized (Win7AppIcon.class) {
if (myOkIcon == null) {
try {
BufferedImage image = ImageIO.read(getClass().getResource("/mac/appIconOk512.png"));
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
writeTransparentIcoImageWithSanselan(image, bytes);
myOkIcon = Win7TaskBar.createIcon(bytes.toByteArray());
}
catch (Throwable e) {
LOG.error(e);
myOkIcon = null;
}
}
icon = myOkIcon;
}
}
try {
// NOTE(review): third argument is false here but 'icon != null' in _setTextBadge -
// presumably because this icon handle is cached and must not be disposed; confirm.
Win7TaskBar.setOverlayIcon(frame, icon, false);
}
catch (Throwable e) {
LOG.error(e);
}
}
@Override
public void _requestAttention(IdeFrame frame, boolean critical) {
  try {
    // Flash the taskbar button to attract the user's attention.
    if (isValid(frame)) {
      Win7TaskBar.attention(frame, critical);
    }
  }
  catch (Throwable e) {
    LOG.error(e);
  }
}
@Override
protected IdeFrame getIdeFrame(Project project) {
return WindowManager.getInstance().getIdeFrame(project);
}
// Intentionally empty: no focus-stealing implementation for Windows in this class.
@Override
public void requestFocus(IdeFrame frame) { }
/** A frame can be updated only while its native peer exists (i.e. it is displayable). */
private static boolean isValid(IdeFrame frame) {
return frame != null && ((Component)frame).isDisplayable();
}
}
/** Fallback for platforms without dock/taskbar integration; every operation is a no-op. */
private static class EmptyIcon extends AppIcon {
@Override
public boolean setProgress(Project project, Object processId, AppIconScheme.Progress scheme, double value, boolean isOk) {
return false;
}
@Override
public boolean hideProgress(Project project, Object processId) {
return false;
}
@Override
public void setErrorBadge(Project project, String text) { }
@Override
public void setOkBadge(Project project, boolean visible) { }
@Override
public void requestAttention(Project project, boolean critical) { }
@Override
public void requestFocus(IdeFrame frame) { }
}
/**
 * Verifies the caller is on the event-dispatch thread: through the platform's
 * Application when one exists (skipped in unit-test mode), otherwise via a
 * plain EventQueue assertion.
 */
private static void assertIsDispatchThread() {
  Application app = ApplicationManager.getApplication();
  if (app == null) {
    // No platform application (e.g. standalone usage) - fall back to AWT check.
    assert EventQueue.isDispatchThread();
    return;
  }
  if (!app.isUnitTestMode()) {
    app.assertIsDispatchThread();
  }
}
}
|
|
package ca.uwaterloo.Lab4_204_08;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import mapper.MapView;
import mapper.NavigationalMap;
import mapper.PositionListener;
import mapper.VectorUtils;
import android.graphics.PointF;
import android.widget.TextView;
import android.widget.Toast;
/**
 * Computes and displays a walking path from the user's position to the
 * destination. A direct route is preferred; otherwise the path is routed
 * through at most one user-side and one destination-side map way-point.
 */
public class PathFinding implements PositionListener {

	// NOTE(review): these fields are static yet reassigned by the constructor, so
	// only a single active PathFinding instance is effectively supported. Kept
	// as-is because other code may reference them statically.
	public static List<PointF> pathList;
	public static MapView mapView;
	public static NavigationalMap map;

	// Fixed set of intermediate way-points harvested from the map view.
	private List<PointF> wayPoints = new ArrayList<PointF>();
	// Way-points with an unobstructed line to the user, nearest first.
	private List<PointF> userDirectWayPoints = new ArrayList<PointF>();
	// Way-points with an unobstructed line to the destination, nearest first.
	private List<PointF> destDirectWayPoints = new ArrayList<PointF>();

	private static TextView pathInfo;
	private static Toast wallHit;

	/**
	 * Registers this object as a position listener on the map view and
	 * captures the map's eight predefined way-points.
	 */
	public PathFinding(List<PointF> pathList, MapView mapView,
			NavigationalMap map, TextView pathInfo, Toast wallHit) {
		PathFinding.pathList = pathList;
		PathFinding.mapView = mapView;
		mapView.addListener(this);
		PathFinding.map = map;
		PathFinding.pathInfo = pathInfo;
		PathFinding.wallHit = wallHit;
		wayPoints.add(mapView.getWayPoint1());
		wayPoints.add(mapView.getWayPoint2());
		wayPoints.add(mapView.getWayPoint3());
		wayPoints.add(mapView.getWayPoint4());
		wayPoints.add(mapView.getWayPoint5());
		wayPoints.add(mapView.getWayPoint6());
		wayPoints.add(mapView.getWayPoint7());
		wayPoints.add(mapView.getWayPoint8());
	}

	/** True when the map reports no wall intersection between the two points. */
	private boolean hasLineOfSight(PointF from, PointF to) {
		return map.calculateIntersections(from, to).size() == 0;
	}

	/** Appends the given points, in order, to the shared path list. */
	private void addRoute(PointF... points) {
		for (PointF point : points) {
			pathList.add(point);
		}
	}

	/**
	 * Recomputes the path from the user to the destination and pushes it to the
	 * map view. Also updates the debug label with the number of directly
	 * reachable way-points on each side.
	 */
	public void findPath() {
		pathList.clear();
		// Assumes the view getters return stable values for the duration of one
		// computation -- NOTE(review): confirm against MapView.
		PointF user = mapView.getUserPoint();
		PointF dest = mapView.getDestinationPoint();
		if (hasLineOfSight(user, dest)) {
			// Direct path from user to destination.
			addRoute(user, dest);
			mapView.setUserPath(pathList);
			if (VectorUtils.distance(user, dest) < 1) {
				wallHit.setText("You have arrived the destination");
				wallHit.show();
			}
		} else {
			userDirectWayPoints.clear();
			destDirectWayPoints.clear();
			// Collect way-points reachable from the user, nearest first.
			for (PointF wayPoint : this.closestToUser(wayPoints)) {
				if (hasLineOfSight(user, wayPoint)) {
					userDirectWayPoints.add(wayPoint);
				}
			}
			// Collect way-points reachable from the destination, nearest first.
			for (PointF wayPoint : this.closestToDestination(wayPoints)) {
				if (hasLineOfSight(dest, wayPoint)) {
					destDirectWayPoints.add(wayPoint);
				}
			}
			if (!userDirectWayPoints.isEmpty()
					&& !destDirectWayPoints.isEmpty()) {
				PointF nearestToUser = userDirectWayPoints.get(0);
				PointF nearestToDest = destDirectWayPoints.get(0);
				if (userDirectWayPoints.size() == 1
						&& destDirectWayPoints.size() == 1) {
					// One candidate on each side: connect them if possible.
					if (hasLineOfSight(nearestToUser, nearestToDest)) {
						addRoute(user, nearestToUser, nearestToDest, dest);
					}
				} else if (destDirectWayPoints.size() == 1) {
					// Destination has a single candidate; try reaching it
					// directly from the user before adding a user-side hop.
					if (hasLineOfSight(user, nearestToDest)) {
						addRoute(user, nearestToDest, dest);
					} else if (hasLineOfSight(nearestToUser, nearestToDest)) {
						addRoute(user, nearestToUser, nearestToDest, dest);
					}
				} else if (userDirectWayPoints.size() == 1) {
					// User has a single candidate; try reaching it directly
					// from the destination before adding a destination-side hop.
					if (hasLineOfSight(dest, nearestToUser)) {
						addRoute(user, nearestToUser, dest);
					} else if (hasLineOfSight(nearestToDest, nearestToUser)) {
						addRoute(user, nearestToUser, nearestToDest, dest);
					}
				} else {
					// Multiple candidates on both sides.
					if (hasLineOfSight(nearestToDest, nearestToUser)) {
						addRoute(user, nearestToUser, nearestToDest, dest);
					} else {
						if (hasLineOfSight(dest, nearestToUser)) {
							addRoute(user, nearestToUser, dest);
						} else if (hasLineOfSight(user, nearestToDest)) {
							addRoute(user, nearestToDest, dest);
						}
						// BUG FIX: the original inner loop broke unconditionally
						// after the first destination way-point, so only one
						// pairing per user way-point was ever examined. Scan
						// every (user, destination) pair until one connects.
						if (pathList.isEmpty()) {
							search:
							for (PointF userWayPoint : userDirectWayPoints) {
								for (PointF destWayPoint : destDirectWayPoints) {
									if (hasLineOfSight(destWayPoint, userWayPoint)) {
										addRoute(user, userWayPoint, destWayPoint, dest);
										break search;
									}
								}
							}
						}
					}
				}
			}
			mapView.setUserPath(pathList);
		}
		pathInfo.setText(String.format("User: %d Dest : %d",
				userDirectWayPoints.size(), destDirectWayPoints.size()));
	}

	/**
	 * Sorts the given list in place by distance from the user (nearest first)
	 * and returns it. Note: this mutates the caller's list (findPath relies on
	 * re-sorting the shared wayPoints list).
	 */
	public List<PointF> closestToUser(List<PointF> ret) {
		Collections.sort(ret, new Comparator<PointF>() {
			@Override
			public int compare(PointF arg0, PointF arg1) {
				float dist0 = VectorUtils.distance(mapView.getUserPoint(), arg0);
				float dist1 = VectorUtils.distance(mapView.getUserPoint(), arg1);
				// Tolerance-based equality keeps the ordering stable for
				// effectively equal distances.
				if (VectorUtils.isZero(dist0 - dist1))
					return 0;
				else if (dist0 < dist1)
					return -1;
				else
					return 1;
			}
		});
		return ret;
	}

	/**
	 * Sorts the given list in place by distance from the destination (nearest
	 * first) and returns it. Note: this mutates the caller's list.
	 */
	public List<PointF> closestToDestination(List<PointF> ret) {
		Collections.sort(ret, new Comparator<PointF>() {
			@Override
			public int compare(PointF arg0, PointF arg1) {
				float dist0 = VectorUtils.distance(
						mapView.getDestinationPoint(), arg0);
				float dist1 = VectorUtils.distance(
						mapView.getDestinationPoint(), arg1);
				if (VectorUtils.isZero(dist0 - dist1))
					return 0;
				else if (dist0 < dist1)
					return -1;
				else
					return 1;
			}
		});
		return ret;
	}

	/** Moves the user marker to the new location and recomputes the path. */
	@Override
	public void originChanged(MapView source, PointF loc) {
		source.setUserPoint(loc);
		this.findPath();
	}

	/** Recomputes the path whenever the destination marker moves. */
	@Override
	public void destinationChanged(MapView source, PointF dest) {
		this.findPath();
	}
}
|
|
/*###
#
# Copyright Alan Kennedy.
#
# You may contact the copyright holder at this uri:
#
# http://www.xhaus.com/contact/modjy
#
# The licence under which this code is released is the Apache License v2.0.
#
# The terms and conditions of this license are listed in a file contained
# in the distribution that also contained this file, under the name
# LICENSE.txt.
#
# You may also read a copy of the license at the following web address.
#
# http://modjy.xhaus.com/LICENSE.txt
#
###*/
package com.xhaus.modjy;
import java.io.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import org.python.core.*;
import org.python.util.*;
/**
 * J2EE entry point for modjy: boots a Jython interpreter, imports the
 * python-implemented servlet class and delegates all requests to it.
 */
public class ModjyJServlet extends HttpServlet
{

    protected final static String MODJY_PYTHON_CLASSNAME = "modjy_servlet";

    protected final static String LIB_PYTHON = "/WEB-INF/lib-python";

    protected final static String PTH_FILE_EXTENSION = ".pth";

    /** Interpreter hosting the python side of modjy. */
    protected PythonInterpreter interp;

    /** The jython-implemented servlet to which all requests are delegated. */
    protected HttpServlet modjyServlet;

    /**
     * Read configuration.
     * 1. Both context and servlet parameters are included in the set,
     *    so that the definition of some parameters (e.g python.*) can be shared
     *    between multiple WSGI servlets.
     * 2. Servlet params take precedence over context parameters.
     *
     * @return the merged configuration properties
     */
    protected Properties readConfiguration ( )
    {
        Properties props = new Properties();
        // Context parameters first ...
        ServletContext context = getServletContext();
        Enumeration e = context.getInitParameterNames();
        while (e.hasMoreElements())
        {
            String name = (String) e.nextElement();
            props.put(name, context.getInitParameter(name));
        }
        // ... then servlet parameters, which override context parameters.
        e = getInitParameterNames();
        while (e.hasMoreElements())
        {
            String name = (String) e.nextElement();
            props.put(name, getInitParameter(name));
        }
        return props;
    }

    /**
     * Initialise the modjy servlet.
     * 1. Read the configuration
     * 2. Initialise the jython runtime
     * 3. Setup, in relation to the J2EE servlet environment
     * 4. Create the jython-implemented servlet
     * 5. Initialise the jython-implemented servlet
     *
     * @throws ServletException if the python side cannot be imported or created
     */
    public void init ( )
        throws ServletException
    {
        try
        {
            Properties props = readConfiguration();
            PythonInterpreter.initialize(System.getProperties(), props, new String[0]);
            PySystemState systemState = new PySystemState();
            interp = new PythonInterpreter(null, systemState);
            String modjyJarLocation = setupEnvironment(interp, props, systemState);
            try
            { interp.exec("from modjy import "+MODJY_PYTHON_CLASSNAME); }
            catch (PyException ix)
            { throw new ServletException("Unable to import '"+MODJY_PYTHON_CLASSNAME+"' from "+modjyJarLocation+
                ": do you maybe need to set the 'modjy_jar.location' parameter?", ix);}
            // Instantiate the python class and coerce it to an HttpServlet.
            PyObject pyServlet = ((PyType)interp.get(MODJY_PYTHON_CLASSNAME)).__call__();
            Object temp = pyServlet.__tojava__(HttpServlet.class);
            if (temp == Py.NoConversion)
                throw new ServletException("Corrupted modjy file: cannot find definition of '"+MODJY_PYTHON_CLASSNAME+"' class");
            modjyServlet = (HttpServlet) temp;
            modjyServlet.init(this);
        }
        catch (PyException pyx)
        {
            throw new ServletException("Exception creating modjy servlet: " + pyx.toString(), pyx);
        }
    }

    /**
     * Actually service the incoming request.
     * Simply delegate to the jython servlet.
     *
     * @param req - The incoming HttpServletRequest
     * @param resp - The outgoing HttpServletResponse
     */
    public void service ( HttpServletRequest req, HttpServletResponse resp )
        throws ServletException, IOException
    {
        modjyServlet.service(req, resp);
    }

    /**
     * Setup the modjy environment, i.e.
     * 1. Find the location of the modjy.jar file and add it to sys.path
     * 2. Process the WEB-INF/lib-python directory, if it exists
     *
     * @param interp - The PythonInterpreter used to service requests
     * @param props - The properties from which config options are found
     * @param systemState - The PySystemState corresponding to the interpreter servicing requests
     * @return A String giving the path to the modjy.jar file (used only for error reporting)
     */
    protected String setupEnvironment(PythonInterpreter interp, Properties props, PySystemState systemState)
    {
        String modjyJarLocation = locateModjyJar(props);
        systemState.path.append(new PyString(modjyJarLocation));
        processPythonLib(interp, systemState);
        return modjyJarLocation;
    }

    /**
     * Find out the location of "modjy.jar", so that it can
     * be added to the sys.path and thus imported.
     * Tries, in order: the 'modjy_jar.location' parameter, WEB-INF/lib,
     * and finally the archive this class was loaded from.
     *
     * @param props - The properties from which config options are found
     * @return A String giving the path to the modjy.jar file, or null
     */
    protected String locateModjyJar ( Properties props )
    {
        // Give priority to modjy_jar.location
        if (props.get("modjy_jar.location") != null)
            return (String)props.get("modjy_jar.location");
        // Then try to find it in WEB-INF/lib
        String location = getServletContext().getRealPath("/WEB-INF/lib/modjy.jar");
        if (location != null)
        {
            File f = new File(location);
            if (f.exists())
                return location;
        }
        // Try finding the archive that this class was loaded from
        try
        { return this.getClass().getProtectionDomain().getCodeSource().getLocation().getFile(); }
        catch (Exception x)
        { return null;}
    }

    /**
     * Do all processing in relation to the lib-python subdirectory of WEB-INF:
     * add it to sys.path and process every .pth file it contains.
     *
     * @param interp - The PythonInterpreter used to service requests
     * @param systemState - The PySystemState whose path should be updated
     */
    protected void processPythonLib(PythonInterpreter interp, PySystemState systemState)
    {
        // Add the lib-python directory to sys.path
        String pythonLibPath = getServletContext().getRealPath(LIB_PYTHON);
        if (pythonLibPath == null)
            return;
        File pythonLib = new File(pythonLibPath);
        if (!pythonLib.exists())
            return;
        systemState.path.append(new PyString(pythonLibPath));
        // Now check for .pth files in lib-python and process each one
        String[] libPythonContents = pythonLib.list();
        // BUG FIX: File.list() can return null on an I/O error even when the
        // directory exists; guard against the resulting NullPointerException.
        if (libPythonContents == null)
            return;
        for (int ix = 0 ; ix < libPythonContents.length ; ix++)
            if (libPythonContents[ix].endsWith(PTH_FILE_EXTENSION))
                processPthFile(interp, systemState, pythonLibPath, libPythonContents[ix]);
    }

    /**
     * Process an individual .pth file in the lib-python directory.
     * Blank lines and '#' comments are skipped; lines starting with "import"
     * are executed; every other line is appended to sys.path, resolved
     * relative to lib-python (mirrors CPython's site-module .pth handling).
     *
     * @param interp - The PythonInterpreter used to service requests
     * @param systemState - The PySystemState whose path should be updated
     * @param pythonLibPath - The actual path to the lib-python directory
     * @param pthFilename - The name of the .pth file to process
     */
    protected void processPthFile(PythonInterpreter interp, PySystemState systemState, String pythonLibPath, String pthFilename)
    {
        LineNumberReader lineReader = null;
        try
        {
            lineReader = new LineNumberReader(new FileReader(new File(pythonLibPath, pthFilename)));
            String line;
            while ((line = lineReader.readLine()) != null)
            {
                line = line.trim();
                if (line.length() == 0)
                    continue;
                if (line.startsWith("#"))
                    continue;
                if (line.startsWith("import"))
                {
                    interp.exec(line);
                    // BUG FIX: previously fell through and appended the import
                    // statement itself to sys.path; per .pth convention an
                    // import line is executed, not treated as a path entry.
                    continue;
                }
                File archiveFile = new File(pythonLibPath, line);
                String archiveRealpath = archiveFile.getAbsolutePath();
                systemState.path.append(new PyString(archiveRealpath));
            }
        }
        catch (IOException iox)
        {
            System.err.println("IOException: " + iox.toString());
        }
        finally
        {
            // BUG FIX: the reader was previously leaked; always close it.
            if (lineReader != null)
            {
                try
                { lineReader.close(); }
                catch (IOException ignored)
                { /* nothing useful left to do */ }
            }
        }
    }
}
|
|
package org.apereo.cas.adaptors.yubikey.dao;
import org.apereo.cas.adaptors.yubikey.YubiKeyAccount;
import org.apereo.cas.adaptors.yubikey.YubiKeyRegisteredDevice;
import org.apereo.cas.configuration.model.support.mfa.yubikey.YubiKeyDynamoDbMultifactorProperties;
import org.apereo.cas.dynamodb.DynamoDbQueryBuilder;
import org.apereo.cas.dynamodb.DynamoDbTableUtils;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.DateTimeUtils;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeAction;
import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate;
import software.amazon.awssdk.services.dynamodb.model.ComparisonOperator;
import software.amazon.awssdk.services.dynamodb.model.DeleteItemRequest;
import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement;
import software.amazon.awssdk.services.dynamodb.model.KeyType;
import software.amazon.awssdk.services.dynamodb.model.PutItemRequest;
import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType;
import software.amazon.awssdk.services.dynamodb.model.UpdateItemRequest;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* This is {@link DynamoDbYubiKeyFacilitator}.
*
* @author Misagh Moayyed
* @since 6.3.0
*/
@Slf4j
public class DynamoDbYubiKeyFacilitator {
    private final YubiKeyDynamoDbMultifactorProperties dynamoDbProperties;

    private final DynamoDbClient amazonDynamoDBClient;

    public DynamoDbYubiKeyFacilitator(final YubiKeyDynamoDbMultifactorProperties dynamoDbProperties,
                                      final DynamoDbClient amazonDynamoDBClient) {
        this.dynamoDbProperties = dynamoDbProperties;
        this.amazonDynamoDBClient = amazonDynamoDBClient;
        // Table creation may be suppressed when the schema is managed externally.
        if (!dynamoDbProperties.isPreventTableCreationOnStartup()) {
            createTable(dynamoDbProperties.isDropTablesOnStartup());
        }
    }

    /**
     * Serialize an account's registered devices into a DynamoDB list attribute,
     * one map entry per device.
     */
    private static AttributeValue toAttributeValue(final YubiKeyAccount account) {
        val deviceEntries = account.getDevices()
            .stream()
            .map(device -> {
                val fields = Map.of(
                    "id", AttributeValue.builder().n(String.valueOf(device.getId())).build(),
                    "name", AttributeValue.builder().s(device.getName()).build(),
                    "publicId", AttributeValue.builder().s(device.getPublicId()).build(),
                    "registrationDate", AttributeValue.builder().s(device.getRegistrationDate().toString()).build());
                return AttributeValue.builder().m(fields).build();
            })
            .collect(Collectors.toList());
        return AttributeValue.builder().l(deviceEntries).build();
    }

    /**
     * Rebuild a registered device from its DynamoDB map attribute.
     */
    private static YubiKeyRegisteredDevice toYubiKeyRegisteredDevice(final Map<String, AttributeValue> map) {
        val registrationDate = DateTimeUtils.zonedDateTimeOf(map.get("registrationDate").s());
        return YubiKeyRegisteredDevice.builder()
            .id(Long.parseLong(map.get("id").n()))
            .name(map.get("name").s())
            .publicId(map.get("publicId").s())
            .registrationDate(registrationDate)
            .build();
    }

    /**
     * Build table attribute values map.
     *
     * @param record the account record to serialize
     * @return attribute-name to attribute-value mapping for the record
     */
    private static Map<String, AttributeValue> buildTableAttributeValuesMap(final YubiKeyAccount record) {
        val attributes = new HashMap<String, AttributeValue>();
        attributes.put(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build());
        attributes.put(ColumnNames.USERNAME.getColumnName(), AttributeValue.builder().s(String.valueOf(record.getUsername())).build());
        attributes.put(ColumnNames.DEVICE_IDENTIFIERS.getColumnName(), toAttributeValue(record));
        LOGGER.debug("Created attribute values [{}] based on [{}]", attributes, record);
        return attributes;
    }

    /**
     * Create the backing table, keyed by username.
     *
     * @param deleteTables whether any existing table should be dropped first
     */
    @SneakyThrows
    public void createTable(final boolean deleteTables) {
        val usernameAttribute = AttributeDefinition.builder()
            .attributeName(ColumnNames.USERNAME.getColumnName())
            .attributeType(ScalarAttributeType.S)
            .build();
        val hashKey = KeySchemaElement.builder()
            .attributeName(ColumnNames.USERNAME.getColumnName())
            .keyType(KeyType.HASH)
            .build();
        DynamoDbTableUtils.createTable(amazonDynamoDBClient, dynamoDbProperties,
            dynamoDbProperties.getTableName(), deleteTables,
            List.of(usernameAttribute), List.of(hashKey));
    }

    /**
     * Remove all registered devices by recreating the table from scratch.
     */
    public void removeDevices() {
        createTable(true);
    }

    /**
     * Gets all accounts.
     *
     * @return every stored account
     */
    public List<? extends YubiKeyAccount> getAccounts() {
        return getRecordsByKeys();
    }

    /**
     * Gets accounts for a given user.
     *
     * @param uid the uid
     * @return the accounts matching the uid
     */
    public List<YubiKeyAccount> getAccounts(final String uid) {
        val query = DynamoDbQueryBuilder.builder()
            .operator(ComparisonOperator.EQ)
            .attributeValue(List.of(AttributeValue.builder().s(uid).build()))
            .key(ColumnNames.USERNAME.getColumnName())
            .build();
        return getRecordsByKeys(query);
    }

    /**
     * Remove a single device from the user's account, if present.
     *
     * @param username the username
     * @param deviceId the device id
     */
    public void delete(final String username, final long deviceId) {
        val accounts = getAccounts(username);
        if (accounts.isEmpty()) {
            return;
        }
        val account = accounts.get(0);
        // Only write back when a device was actually removed.
        if (account != null && account.getDevices().removeIf(device -> device.getId() == deviceId)) {
            update(account);
        }
    }

    /**
     * Delete the entire record for the given user.
     *
     * @param uid the uid
     */
    public void delete(final String uid) {
        val usernameKey = CollectionUtils.wrap(ColumnNames.USERNAME.getColumnName(),
            AttributeValue.builder().s(uid).build());
        val request = DeleteItemRequest.builder()
            .tableName(dynamoDbProperties.getTableName())
            .key(usernameKey)
            .build();
        amazonDynamoDBClient.deleteItem(request);
    }

    /**
     * Save a new account record.
     *
     * @param registration the registration
     * @return always true
     */
    public boolean save(final YubiKeyAccount registration) {
        val attributes = buildTableAttributeValuesMap(registration);
        val request = PutItemRequest.builder()
            .tableName(dynamoDbProperties.getTableName())
            .item(attributes)
            .build();
        LOGGER.debug("Submitting put request [{}] for record [{}]", request, registration);
        val result = amazonDynamoDBClient.putItem(request);
        LOGGER.debug("Record added with result [{}]", result);
        return true;
    }

    /**
     * Replace the device list of an existing account record.
     *
     * @param registration the registration
     * @return always true
     */
    public boolean update(final YubiKeyAccount registration) {
        val usernameKey = Map.of(ColumnNames.USERNAME.getColumnName(),
            AttributeValue.builder().s(String.valueOf(registration.getUsername())).build());
        val deviceUpdate = Map.of(ColumnNames.DEVICE_IDENTIFIERS.getColumnName(),
            AttributeValueUpdate.builder().value(toAttributeValue(registration)).action(AttributeAction.PUT).build());
        val request = UpdateItemRequest.builder()
            .tableName(dynamoDbProperties.getTableName())
            .key(usernameKey)
            .attributeUpdates(deviceUpdate)
            .build();
        LOGGER.debug("Submitting put request [{}] for record [{}]", request, registration);
        val result = amazonDynamoDBClient.updateItem(request);
        LOGGER.debug("Record added with result [{}]", result);
        return true;
    }

    /** Column names used by the backing table. */
    @Getter
    @RequiredArgsConstructor
    private enum ColumnNames {
        ID("id"), USERNAME("username"), DEVICE_IDENTIFIERS("deviceIdentifiers");

        private final String columnName;
    }

    @SneakyThrows
    private List<YubiKeyAccount> getRecordsByKeys(final DynamoDbQueryBuilder... queries) {
        val queryList = Arrays.stream(queries).collect(Collectors.toList());
        return DynamoDbTableUtils.getRecordsByKeys(amazonDynamoDBClient, dynamoDbProperties.getTableName(),
            queryList,
            item -> {
                val devices = item.get(ColumnNames.DEVICE_IDENTIFIERS.getColumnName()).l()
                    .stream()
                    .map(value -> toYubiKeyRegisteredDevice(value.m()))
                    .collect(Collectors.toList());
                return YubiKeyAccount.builder()
                    .id(Long.parseLong(item.get(ColumnNames.ID.getColumnName()).n()))
                    .username(item.get(ColumnNames.USERNAME.getColumnName()).s())
                    .devices(devices)
                    .build();
            })
            .collect(Collectors.toList());
    }
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/experimental/experimental.proto
package com.google.api;
/**
* <pre>
* Experimental service configuration. These configuration options can
* only be used by whitelisted users.
* </pre>
*
* Protobuf type {@code google.api.Experimental}
*/
public final class Experimental extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.api.Experimental)
ExperimentalOrBuilder {
// Use Experimental.newBuilder() to construct.
private Experimental(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Experimental() {
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private Experimental(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 66: {
com.google.api.AuthorizationConfig.Builder subBuilder = null;
if (authorization_ != null) {
subBuilder = authorization_.toBuilder();
}
authorization_ = input.readMessage(com.google.api.AuthorizationConfig.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(authorization_);
authorization_ = subBuilder.buildPartial();
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.api.ExperimentalProto.internal_static_google_api_Experimental_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.ExperimentalProto.internal_static_google_api_Experimental_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.Experimental.class, com.google.api.Experimental.Builder.class);
}
public static final int AUTHORIZATION_FIELD_NUMBER = 8;
private com.google.api.AuthorizationConfig authorization_;
/**
* <pre>
* Authorization configuration.
* </pre>
*
* <code>.google.api.AuthorizationConfig authorization = 8;</code>
*/
public boolean hasAuthorization() {
return authorization_ != null;
}
/**
* <pre>
* Authorization configuration.
* </pre>
*
* <code>.google.api.AuthorizationConfig authorization = 8;</code>
*/
public com.google.api.AuthorizationConfig getAuthorization() {
return authorization_ == null ? com.google.api.AuthorizationConfig.getDefaultInstance() : authorization_;
}
/**
* <pre>
* Authorization configuration.
* </pre>
*
* <code>.google.api.AuthorizationConfig authorization = 8;</code>
*/
public com.google.api.AuthorizationConfigOrBuilder getAuthorizationOrBuilder() {
return getAuthorization();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (authorization_ != null) {
output.writeMessage(8, getAuthorization());
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (authorization_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, getAuthorization());
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.Experimental)) {
return super.equals(obj);
}
com.google.api.Experimental other = (com.google.api.Experimental) obj;
boolean result = true;
result = result && (hasAuthorization() == other.hasAuthorization());
if (hasAuthorization()) {
result = result && getAuthorization()
.equals(other.getAuthorization());
}
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAuthorization()) {
hash = (37 * hash) + AUTHORIZATION_FIELD_NUMBER;
hash = (53 * hash) + getAuthorization().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.api.Experimental parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.Experimental parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.Experimental parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.Experimental parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.Experimental parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.Experimental parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.Experimental parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.api.Experimental parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.api.Experimental parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.Experimental parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.api.Experimental parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.api.Experimental parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.api.Experimental prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Experimental service configuration. These configuration options can
* only be used by whitelisted users.
* </pre>
*
* Protobuf type {@code google.api.Experimental}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.api.Experimental)
com.google.api.ExperimentalOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.api.ExperimentalProto.internal_static_google_api_Experimental_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.ExperimentalProto.internal_static_google_api_Experimental_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.Experimental.class, com.google.api.Experimental.Builder.class);
}
// Construct using com.google.api.Experimental.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
if (authorizationBuilder_ == null) {
authorization_ = null;
} else {
authorization_ = null;
authorizationBuilder_ = null;
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.api.ExperimentalProto.internal_static_google_api_Experimental_descriptor;
}
public com.google.api.Experimental getDefaultInstanceForType() {
return com.google.api.Experimental.getDefaultInstance();
}
public com.google.api.Experimental build() {
com.google.api.Experimental result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.api.Experimental buildPartial() {
com.google.api.Experimental result = new com.google.api.Experimental(this);
if (authorizationBuilder_ == null) {
result.authorization_ = authorization_;
} else {
result.authorization_ = authorizationBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
    // Reflective field mutators: these simply narrow the return type of the
    // generated base-class implementations to Builder for call chaining.
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    // Merges any Message: uses the typed fast path when the argument is an
    // Experimental, otherwise falls back to reflective field-by-field merging.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.api.Experimental) {
        return mergeFrom((com.google.api.Experimental)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: a default-instance argument is a no-op; otherwise the
    // authorization submessage (if present) is merged field-wise.
    public Builder mergeFrom(com.google.api.Experimental other) {
      if (other == com.google.api.Experimental.getDefaultInstance()) return this;
      if (other.hasAuthorization()) {
        mergeAuthorization(other.getAuthorization());
      }
      onChanged();
      return this;
    }
    // Proto3 messages have no required fields, so this is always true.
    public final boolean isInitialized() {
      return true;
    }
    // Parses from a stream and merges the result. On a parse error the
    // partially read message is still merged (in the finally block) before the
    // exception is rethrown as an IOException.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.api.Experimental parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.api.Experimental) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Generated accessors for the singular message field `authorization`
    // (field number 8). Exactly one of the raw message (authorization_) or the
    // sub-builder (authorizationBuilder_) is live at a time; once the field
    // builder is created it owns the field from then on.
    private com.google.api.AuthorizationConfig authorization_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.api.AuthorizationConfig, com.google.api.AuthorizationConfig.Builder, com.google.api.AuthorizationConfigOrBuilder> authorizationBuilder_;
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public boolean hasAuthorization() {
      return authorizationBuilder_ != null || authorization_ != null;
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public com.google.api.AuthorizationConfig getAuthorization() {
      if (authorizationBuilder_ == null) {
        return authorization_ == null ? com.google.api.AuthorizationConfig.getDefaultInstance() : authorization_;
      } else {
        return authorizationBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public Builder setAuthorization(com.google.api.AuthorizationConfig value) {
      if (authorizationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        authorization_ = value;
        onChanged();
      } else {
        authorizationBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public Builder setAuthorization(
        com.google.api.AuthorizationConfig.Builder builderForValue) {
      if (authorizationBuilder_ == null) {
        authorization_ = builderForValue.build();
        onChanged();
      } else {
        authorizationBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public Builder mergeAuthorization(com.google.api.AuthorizationConfig value) {
      if (authorizationBuilder_ == null) {
        if (authorization_ != null) {
          authorization_ =
            com.google.api.AuthorizationConfig.newBuilder(authorization_).mergeFrom(value).buildPartial();
        } else {
          authorization_ = value;
        }
        onChanged();
      } else {
        authorizationBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public Builder clearAuthorization() {
      if (authorizationBuilder_ == null) {
        authorization_ = null;
        onChanged();
      } else {
        authorization_ = null;
        authorizationBuilder_ = null;
      }
      return this;
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public com.google.api.AuthorizationConfig.Builder getAuthorizationBuilder() {
      onChanged();
      return getAuthorizationFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    public com.google.api.AuthorizationConfigOrBuilder getAuthorizationOrBuilder() {
      if (authorizationBuilder_ != null) {
        return authorizationBuilder_.getMessageOrBuilder();
      } else {
        return authorization_ == null ?
            com.google.api.AuthorizationConfig.getDefaultInstance() : authorization_;
      }
    }
    /**
     * <pre>
     * Authorization configuration.
     * </pre>
     *
     * <code>.google.api.AuthorizationConfig authorization = 8;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.api.AuthorizationConfig, com.google.api.AuthorizationConfig.Builder, com.google.api.AuthorizationConfigOrBuilder>
        getAuthorizationFieldBuilder() {
      if (authorizationBuilder_ == null) {
        authorizationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.api.AuthorizationConfig, com.google.api.AuthorizationConfig.Builder, com.google.api.AuthorizationConfigOrBuilder>(
                getAuthorization(),
                getParentForChildren(),
                isClean());
        authorization_ = null;
      }
      return authorizationBuilder_;
    }
    // Unknown fields are deliberately dropped by this generated variant: both
    // overrides ignore their argument and return the builder unchanged.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
// @@protoc_insertion_point(builder_scope:google.api.Experimental)
}
// @@protoc_insertion_point(class_scope:google.api.Experimental)
  // Singleton default instance plus the stream parser used by mergeFrom and
  // the runtime. Generated by protoc; do not hand-edit.
  private static final com.google.api.Experimental DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.api.Experimental();
  }
  public static com.google.api.Experimental getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<Experimental>
      PARSER = new com.google.protobuf.AbstractParser<Experimental>() {
    public Experimental parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // Parsing constructor reads the message directly from the stream.
      return new Experimental(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<Experimental> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Experimental> getParserForType() {
    return PARSER;
  }
  public com.google.api.Experimental getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
|
package com.google.android.gms.drive.sample.quickstart.activity;
import android.app.Activity;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.support.v4.app.NotificationCompat;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.drive.Drive;
import com.google.android.gms.drive.DriveApi;
import com.google.android.gms.drive.DriveFile;
import com.google.android.gms.drive.DriveFolder;
import com.google.android.gms.drive.DriveId;
import com.google.android.gms.drive.DriveResource;
import com.google.android.gms.drive.Metadata;
import com.google.android.gms.drive.OpenFileActivityBuilder;
import com.google.android.gms.drive.events.ChangeEvent;
import com.google.android.gms.drive.events.ChangeListener;
import com.google.android.gms.drive.sample.quickstart.R;
import static com.google.android.gms.drive.DriveApi.*;
import static com.google.android.gms.drive.DriveResource.*;
/**
 * Lets the user pick a PDF report from Google Drive via the file opener,
 * resolves its metadata and opens its web content link in an external viewer.
 * Also contains helpers for locating a "Reports" folder under the Drive root
 * and for subscribing to Drive change events that raise a local notification.
 *
 * NOTE(review): relies on MainActivity having already created the shared
 * GoogleApiClient -- confirm the launch order guarantees this.
 */
public class ReceiveActivity extends Activity {
    private static final int REQUEST_CODE_OPENER = 5;
    private static final String TAG = "ReceiveActivity";
    /** Shared API client owned by MainActivity; may be null if it never ran. */
    private static GoogleApiClient mGoogleApiClient;
    /** DriveId of the "Reports" folder, once located under the Drive root. */
    private DriveId folderId_;
    /** Web-view link of the "Reports" folder, captured when it is found. */
    private String link_;
    private Button bankQueryBtn_;
    private Button reminderBtn_;
    private Button viewReportsBtn_;
    /** Content view kept as an anchor for change-event notifications. */
    private View view_;
    /**
     * Represents the file picked by the user.
     */
    private DriveId mSelectedFileId;
    /**
     * Keeps the status whether change events are being listened to or not.
     * Guarded by {@link #mSubscriptionStatusLock}.
     */
    private boolean isSubscribed = false;
    private final Object mSubscriptionStatusLock = new Object();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_receive);
        view_ = findViewById(android.R.id.content);
        mGoogleApiClient = MainActivity.getGoogleApiClient();
        bankQueryBtn_ = (Button) findViewById(R.id.bank_query_btn);
        reminderBtn_ = (Button) findViewById(R.id.reminder_btn);
        viewReportsBtn_ = (Button) findViewById(R.id.reports_btn);
        viewReportsBtn_.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                openReports();
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Reconnect the shared client whenever we return to the foreground.
        if (mGoogleApiClient != null) {
            mGoogleApiClient.connect();
        }
    }

    @Override
    protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
        switch (requestCode) {
            case REQUEST_CODE_OPENER:
                // Guard against a null intent, which the platform delivers on cancellation.
                if (resultCode == RESULT_OK && data != null) {
                    mSelectedFileId = (DriveId) data.getParcelableExtra(
                            OpenFileActivityBuilder.EXTRA_RESPONSE_DRIVE_ID);
                    DriveFile selectedPdfReport = Drive.DriveApi.getFile(mGoogleApiClient, mSelectedFileId);
                    selectedPdfReport.getMetadata(mGoogleApiClient).setResultCallback(metadataRetrievedCallback);
                }
                break;
            default:
                super.onActivityResult(requestCode, resultCode, data);
        }
    }

    /** Opens the picked file's web content link once its metadata is available. */
    ResultCallback<DriveResource.MetadataResult> metadataRetrievedCallback =
            new ResultCallback<DriveResource.MetadataResult>() {
                @Override
                public void onResult(MetadataResult result) {
                    if (!result.getStatus().isSuccess()) {
                        Log.i(TAG, "Failed to get metadata.");
                        return;
                    }
                    Metadata metadata = result.getMetadata();
                    Log.i(TAG, "Metadata was retrieved successfully.");
                    openPdf(metadata.getWebContentLink());
                }
            };

    /** Hands a PDF link off to an external viewer, if any is installed. */
    private void openPdf(String link) {
        openWebLink(link);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_receive, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // The action bar handles Home/Up automatically when a parent activity
        // is declared in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Launches the Drive file picker filtered to PDF reports. */
    private void openReports() {
        launchOpener("application/pdf");
    }

    /**
     * This method opens Drive activity with list of folders and files
     * and helps user to choose folder for listening changes.
     */
    private void selectFolderForChangeListening() {
        launchOpener("application/vnd.google-apps.folder");
    }

    /**
     * Shared implementation of the Drive file-chooser launch for a single MIME
     * type (openReports and selectFolderForChangeListening were duplicates).
     */
    private void launchOpener(final String mimeType) {
        Drive.DriveApi.newDriveContents(mGoogleApiClient)
                .setResultCallback(new ResultCallback<DriveApi.DriveContentsResult>() {
                    @Override
                    public void onResult(DriveApi.DriveContentsResult result) {
                        // If the operation was not successful, we cannot do anything and must fail.
                        if (!result.getStatus().isSuccess()) {
                            Log.i(TAG, "Failed to create new contents.");
                            return;
                        }
                        // Create an intent for the file chooser, and start it.
                        IntentSender intentSender = Drive.DriveApi
                                .newOpenFileActivityBuilder()
                                .setMimeType(new String[]{mimeType})
                                .build(mGoogleApiClient);
                        try {
                            startIntentSenderForResult(
                                    intentSender, REQUEST_CODE_OPENER, null, 0, 0, 0);
                        } catch (IntentSender.SendIntentException e) {
                            Log.i(TAG, "Failed to launch file chooser.");
                        }
                    }
                });
    }

    /** Starts the lookup of the "Reports" folder from the Drive root. */
    private void getFolder() {
        DriveFolder folder = Drive.DriveApi.getRootFolder(mGoogleApiClient);
        folder.listChildren(mGoogleApiClient).setResultCallback(childrenRetrievedCallback);
    }

    /** Scans the root folder's children for one named "Reports" and lists its files. */
    ResultCallback<DriveApi.MetadataBufferResult> childrenRetrievedCallback =
            new ResultCallback<DriveApi.MetadataBufferResult>() {
                @Override
                public void onResult(MetadataBufferResult result) {
                    if (!result.getStatus().isSuccess()) {
                        // BUG FIX: show() was missing, so the toast never appeared.
                        Toast.makeText(ReceiveActivity.this, "Problem while retrieving folders",
                                Toast.LENGTH_LONG).show();
                        Log.v(TAG, "Problem while retrieving folders");
                        return;
                    }
                    for (int i = 0; i < result.getMetadataBuffer().getCount(); i++) {
                        String originalFileName = result.getMetadataBuffer().get(i).getOriginalFilename();
                        Log.v(TAG, String.valueOf(originalFileName));
                        // Compare constant-first so a null filename cannot throw.
                        if ("Reports".equals(originalFileName)) {
                            folderId_ = result.getMetadataBuffer().get(i).getDriveId();
                            DriveFolder reportsFolder = Drive.DriveApi.getFolder(mGoogleApiClient, folderId_);
                            reportsFolder.listChildren(mGoogleApiClient).setResultCallback(reportsResultCallback);
                            link_ = result.getMetadataBuffer().get(i).getWebViewLink();
                            break;
                        }
                    }
                }
            };

    /** Opens the first file found inside the "Reports" folder. */
    ResultCallback<DriveApi.MetadataBufferResult> reportsResultCallback =
            new ResultCallback<MetadataBufferResult>() {
                @Override
                public void onResult(MetadataBufferResult metadataBufferResult) {
                    if (!metadataBufferResult.getStatus().isSuccess()) {
                        // BUG FIX: show() was missing, so the toast never appeared.
                        Toast.makeText(ReceiveActivity.this, "Problem while retrieving files",
                                Toast.LENGTH_LONG).show();
                        Log.v(TAG, "Problem while retrieving files");
                        return;
                    }
                    int count = metadataBufferResult.getMetadataBuffer().getCount();
                    Log.v(TAG, Integer.toString(count));
                    // Guard the get(0) below: an empty folder used to crash here.
                    if (count == 0) {
                        Log.v(TAG, "Reports folder is empty");
                        return;
                    }
                    String reportFileName = metadataBufferResult.getMetadataBuffer().get(0).getOriginalFilename();
                    Log.v("File name", String.valueOf(reportFileName));
                    String reportFileTitle = metadataBufferResult.getMetadataBuffer().get(0).getTitle();
                    Log.v("File title", String.valueOf(reportFileTitle));
                    String fileLink = metadataBufferResult.getMetadataBuffer().get(0).getWebViewLink();
                    Log.v("File link", String.valueOf(fileLink));
                    openGoogleSheets(fileLink);
                }
            };

    /** Hands a Google Sheets link off to an external viewer, if any is installed. */
    private void openGoogleSheets(String link) {
        openWebLink(link);
    }

    /** Fires an ACTION_VIEW intent for the link when some app can handle it. */
    private void openWebLink(String link) {
        Uri url = Uri.parse(link);
        Intent intent = new Intent(Intent.ACTION_VIEW, url);
        if (intent.resolveActivity(getPackageManager()) != null) {
            startActivity(intent);
        } else {
            // BUG FIX: log tag said "MainActivity" (copy-paste); use this class's TAG.
            Log.d(TAG, "Couldn't call because no receiving apps installed!");
        }
    }

    /**
     * A listener to handle file change events.
     */
    private final ChangeListener changeListener = new ChangeListener() {
        @Override
        public void onChange(ChangeEvent event) {
            sendNotification(view_);
        }
    };

    /** Posts a local notification that reopens this activity when tapped. */
    private void sendNotification(View view) {
        Intent intent = new Intent(ReceiveActivity.this, ReceiveActivity.class);
        PendingIntent pendingIntent = PendingIntent.getActivity(
                ReceiveActivity.this, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
        NotificationCompat.Builder builder = new NotificationCompat.Builder(ReceiveActivity.this);
        builder.setSmallIcon(R.drawable.ic_launcher);
        // Set the intent that will fire when the user taps the notification.
        builder.setContentIntent(pendingIntent);
        // Auto-cancel: the notification disappears once tapped rather than
        // remaining until it's explicitly dismissed.
        builder.setAutoCancel(true);
        builder.setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher));
        builder.setContentTitle("BasicNotifications Sample");
        builder.setContentText("Time to learn about notifications!");
        builder.setSubText("Tap to view documentation about notifications.");
        NotificationManager notificationManager = (NotificationManager)
                ReceiveActivity.this.getSystemService(Context.NOTIFICATION_SERVICE);
        notificationManager.notify(0, builder.build());
    }

    /**
     * Toggles the subscription status. If there is no selected file, returns
     * immediately.
     */
    private void toggle() {
        if (mSelectedFileId == null) {
            return;
        }
        synchronized (mSubscriptionStatusLock) {
            DriveFile file = mSelectedFileId.asDriveFile();
            if (!isSubscribed) {
                Log.d(TAG, "Starting to listen to the file changes.");
                file.addChangeListener(mGoogleApiClient, changeListener);
                isSubscribed = true;
            } else {
                Log.d(TAG, "Stopping to listen to the file changes.");
                file.removeChangeListener(mGoogleApiClient, changeListener);
                isSubscribed = false;
            }
        }
    }
}
|
|
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.coeus.common.budget.impl.calculator;
import java.util.Objects;

import org.kuali.coeus.sys.api.model.ScaleTwoDecimal;
/**
* Holds all the calculated amounts for a rate class - rate type combination for each
* breakup interval.
*
*/
/**
 * Holds all the calculated amounts for a rate class - rate type combination for each
 * breakup interval.
 *
 * <p>Equality (and hashing) is based only on the identifying triple
 * {@code rateClassType} / {@code rateClassCode} / {@code rateTypeCode};
 * amount fields are intentionally excluded.
 */
public class RateAndCost {

    private String rateClassType;
    private String rateClassCode;
    private String rateTypeCode;
    private Boolean applyRateFlag;
    private ScaleTwoDecimal appliedRate;
    private ScaleTwoDecimal calculatedCost;
    private ScaleTwoDecimal calculatedCostSharing;
    private ScaleTwoDecimal underRecovery;
    private ScaleTwoDecimal baseAmount;
    private ScaleTwoDecimal baseCostSharingAmount;
    private boolean calculated;

    @Override
    public String toString() {
        return "RateAndCost [rateClassType=" + rateClassType + ", rateClassCode=" + rateClassCode + ", rateTypeCode="
            + rateTypeCode + ", applyRateFlag=" + applyRateFlag + ", appliedRate=" + appliedRate + ", calculatedCost="
            + calculatedCost + ", calculatedCostSharing=" + calculatedCostSharing + ", underRecovery=" + underRecovery
            + ", baseAmount=" + baseAmount + ", baseCostSharingAmount=" + baseCostSharingAmount + ", calculated=" + calculated
            + "]";
    }

    /** Getter for property rateClassCode.
     * @return Value of property rateClassCode.
     */
    public String getRateClassCode() {
        return rateClassCode;
    }

    /** Setter for property rateClassCode.
     * @param rateClassCode New value of property rateClassCode.
     */
    public void setRateClassCode(String rateClassCode) {
        this.rateClassCode = rateClassCode;
    }

    /** Getter for property rateTypeCode.
     * @return Value of property rateTypeCode.
     */
    public String getRateTypeCode() {
        return rateTypeCode;
    }

    /** Setter for property rateTypeCode.
     * @param rateTypeCode New value of property rateTypeCode.
     */
    public void setRateTypeCode(String rateTypeCode) {
        this.rateTypeCode = rateTypeCode;
    }

    /** Getter for property applyRateFlag.
     * @return Value of property applyRateFlag.
     */
    public Boolean isApplyRateFlag() {
        return applyRateFlag;
    }

    /** Setter for property applyRateFlag.
     * @param applyRateFlag New value of property applyRateFlag.
     */
    public void setApplyRateFlag(Boolean applyRateFlag) {
        this.applyRateFlag = applyRateFlag;
    }

    /** Getter for property calculatedCost.
     * @return Value of property calculatedCost.
     */
    public ScaleTwoDecimal getCalculatedCost() {
        return calculatedCost;
    }

    /** Setter for property calculatedCost.
     * @param calculatedCost New value of property calculatedCost.
     */
    public void setCalculatedCost(ScaleTwoDecimal calculatedCost) {
        this.calculatedCost = calculatedCost;
    }

    /** Getter for property calculatedCostSharing.
     * @return Value of property calculatedCostSharing.
     */
    public ScaleTwoDecimal getCalculatedCostSharing() {
        return calculatedCostSharing;
    }

    /** Setter for property calculatedCostSharing.
     * @param calculatedCostSharing New value of property calculatedCostSharing.
     */
    public void setCalculatedCostSharing(ScaleTwoDecimal calculatedCostSharing) {
        this.calculatedCostSharing = calculatedCostSharing;
    }

    /** Getter for property rateClassType.
     * @return Value of property rateClassType.
     */
    public java.lang.String getRateClassType() {
        return rateClassType;
    }

    /** Setter for property rateClassType.
     * @param rateClassType New value of property rateClassType.
     */
    public void setRateClassType(java.lang.String rateClassType) {
        this.rateClassType = rateClassType;
    }

    /** Getter for property underRecovery.
     * @return Value of property underRecovery, never null (ZERO when unset).
     */
    public ScaleTwoDecimal getUnderRecovery() {
        return underRecovery == null ? ScaleTwoDecimal.ZERO : underRecovery;
    }

    /** Setter for property underRecovery.
     * @param underRecovery New value of property underRecovery.
     */
    public void setUnderRecovery(ScaleTwoDecimal underRecovery) {
        this.underRecovery = underRecovery;
    }

    /** Getter for property appliedRate.
     * @return Value of property appliedRate.
     */
    public ScaleTwoDecimal getAppliedRate() {
        return appliedRate;
    }

    /** Setter for property appliedRate.
     * @param appliedRate New value of property appliedRate.
     */
    public void setAppliedRate(ScaleTwoDecimal appliedRate) {
        this.appliedRate = appliedRate;
    }

    /**
     * Two instances are equal when they share the same rate class type, rate
     * class code and rate type code.
     *
     * <p>BUG FIX: the previous implementation compared the String codes with
     * {@code ==}, cast without an {@code instanceof} check (ClassCastException
     * on foreign types), could NPE on null fields, and overrode equals without
     * hashCode.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof RateAndCost)) {
            return false;
        }
        RateAndCost other = (RateAndCost) obj;
        return Objects.equals(getRateClassType(), other.getRateClassType())
            && Objects.equals(getRateClassCode(), other.getRateClassCode())
            && Objects.equals(getRateTypeCode(), other.getRateTypeCode());
    }

    /** Consistent with {@link #equals(Object)}: hashes the identifying triple only. */
    @Override
    public int hashCode() {
        return Objects.hash(getRateClassType(), getRateClassCode(), getRateTypeCode());
    }

    /**
     * Getter for property baseAmount.
     * @return Value of property baseAmount, never null (ZERO when unset).
     */
    public ScaleTwoDecimal getBaseAmount() {
        return baseAmount == null ? ScaleTwoDecimal.ZERO : baseAmount;
    }

    /**
     * Setter for property baseAmount.
     * @param baseAmount New value of property baseAmount.
     */
    public void setBaseAmount(ScaleTwoDecimal baseAmount) {
        this.baseAmount = baseAmount;
    }

    /**
     * Gets the baseCostSharingAmount attribute.
     * @return Returns the baseCostSharingAmount, never null (ZERO when unset).
     */
    public ScaleTwoDecimal getBaseCostSharingAmount() {
        return baseCostSharingAmount == null ? ScaleTwoDecimal.ZERO : baseCostSharingAmount;
    }

    /**
     * Sets the baseCostSharingAmount attribute value.
     * @param baseCostSharingAmount The baseCostSharingAmount to set.
     */
    public void setBaseCostSharingAmount(ScaleTwoDecimal baseCostSharingAmount) {
        this.baseCostSharingAmount = baseCostSharingAmount;
    }

    /**
     * Gets the calculated attribute.
     * @return Returns the calculated.
     */
    public boolean isCalculated() {
        return calculated;
    }

    /**
     * Sets the calculated attribute value.
     * @param calculated The calculated to set.
     */
    public void setCalculated(boolean calculated) {
        this.calculated = calculated;
    }
}
|
|
package edu.uw.zookeeper.protocol.client;
import java.lang.ref.Reference;
import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.logging.log4j.Logger;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Queues;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import edu.uw.zookeeper.common.Actors;
import edu.uw.zookeeper.common.Automaton;
import edu.uw.zookeeper.common.Promise;
import edu.uw.zookeeper.common.PromiseTask;
import edu.uw.zookeeper.common.TimeValue;
import edu.uw.zookeeper.net.Connection;
import edu.uw.zookeeper.protocol.Message;
import edu.uw.zookeeper.protocol.ConnectMessage;
import edu.uw.zookeeper.protocol.Operation;
import edu.uw.zookeeper.protocol.ProtocolConnection;
import edu.uw.zookeeper.protocol.proto.OpCodeXid;
/**
 * Connection client executor that keeps each request it has written to the
 * connection in a "pending" queue until a server response carrying the same
 * xid is read back, at which point the corresponding task future is completed.
 *
 * NOTE(review): response matching assumes xids are unique among outstanding
 * requests (pings/notifications excepted) -- confirm against the protocol.
 */
public abstract class PendingQueueClientExecutor<
    I extends Operation.Request,
    O extends Operation.ProtocolResponse<?>,
    T extends PendingQueueClientExecutor.RequestTask<I,O>,
    C extends ProtocolConnection<? super Message.ClientSession, ? extends Operation.Response,?,?,?>,
    V extends PendingQueueClientExecutor.PendingTask>
    extends AbstractConnectionClientExecutor<I,O,T,C,V> {
    // Queue of requests written to the connection but not yet answered.
    protected final Pending<O,V,C> pending;
    protected PendingQueueClientExecutor(
            Logger logger,
            ListenableFuture<ConnectMessage.Response> session,
            C connection,
            TimeValue timeOut,
            ScheduledExecutorService scheduler) {
        super(session, connection, timeOut, scheduler);
        this.pending = Pending.<O,V,C>create(connection, this, logger);
    }
    protected PendingQueueClientExecutor(
            Logger logger,
            ListenableFuture<ConnectMessage.Response> session,
            C connection,
            Listeners listeners,
            TimeOutServer<Operation.Response> timer,
            AtomicReference<Throwable> failure) {
        super(session, connection, listeners, timer, failure);
        this.pending = Pending.<O,V,C>create(connection, this, logger);
    }
    // Forwards every inbound message to the pending queue after the base
    // class has processed it, so matching tasks can be completed.
    @Override
    public void handleConnectionRead(Operation.Response message) {
        super.handleConnectionRead(message);
        pending.handleConnectionRead(message);
    }
    @Override
    public void handleConnectionState(Automaton.Transition<Connection.State> event) {
        super.handleConnectionState(event);
        pending.handleConnectionState(event);
    }
    // Stops the pending actor first so queued tasks are cancelled before the
    // base executor tears down.
    @Override
    protected void doStop() {
        pending.stop();
        super.doStop();
    }
    // Tasks execute on the connection itself (it is also an Executor here).
    protected Executor executor() {
        return connection;
    }
    /**
     * Promise for a single written request, keyed by the request's xid.
     * Holds the request via a Reference so it can be reclaimed once sent.
     */
    public static class PendingTask
        extends PromiseTask<Reference<? extends Message.ClientRequest<?>>, Message.ServerResponse<?>>
        implements Operation.RequestId, FutureCallback<Message.ClientRequest<?>> {
        public static PendingTask create(
                Reference<? extends Message.ClientRequest<?>> task,
                Promise<Message.ServerResponse<?>> delegate) {
            return new PendingTask(task, delegate);
        }
        // Captured eagerly because the referenced request may be cleared later.
        protected final int xid;
        public PendingTask(
                Reference<? extends Message.ClientRequest<?>> task,
                Promise<Message.ServerResponse<?>> delegate) {
            super(task, delegate);
            this.xid = task().get().xid();
        }
        @Override
        public int xid() {
            return xid;
        }
        // May return null if the reference has been cleared.
        public Message.ClientRequest<?> getRequest() {
            return task().get();
        }
        // Write-callback: success is a no-op (completion comes from the
        // response), failure fails the promise.
        @Override
        public void onSuccess(Message.ClientRequest<?> result) {
        }
        @Override
        public void onFailure(Throwable t) {
            setException(t);
        }
        @Override
        protected MoreObjects.ToStringHelper toStringHelper(MoreObjects.ToStringHelper toString) {
            return super.toStringHelper(toString.add("xid", xid));
        }
    }
    /**
     * PendingTask that additionally carries the caller-facing promise
     * separately from the internal delegate promise.
     */
    public static class PendingPromiseTask extends PendingTask {
        public static PendingPromiseTask create(
                Promise<Message.ServerResponse<?>> promise,
                Reference<? extends Message.ClientRequest<?>> task,
                Promise<Message.ServerResponse<?>> delegate) {
            return new PendingPromiseTask(promise, task, delegate);
        }
        protected final Promise<Message.ServerResponse<?>> promise;
        public PendingPromiseTask(
                Promise<Message.ServerResponse<?>> promise,
                Reference<? extends Message.ClientRequest<?>> task,
                Promise<Message.ServerResponse<?>> delegate) {
            super(task, delegate);
            this.promise = promise;
        }
        public Promise<Message.ServerResponse<?>> getPromise() {
            return promise;
        }
    }
    /**
     * Actor that owns the pending queue: enqueues tasks as they are written
     * and completes them as responses arrive. Completed tasks are handed to
     * the supplied callback when removed from the queue.
     */
    public static class Pending<
        O extends Operation.ProtocolResponse<?>,
        T extends PendingTask,
        C extends ProtocolConnection<? super Message.ClientSession, ? extends Operation.Response,?,?,?>> extends Actors.ExecutedPeekingQueuedActor<T> implements Connection.Listener<Operation.Response> {
        public static <O extends Operation.ProtocolResponse<?>,T extends PendingTask,C extends ProtocolConnection<? super Message.ClientSession, ? extends Operation.Response,?,?,?>> Pending<O,T,C> create(
                C connection,
                FutureCallback<? super T> callback,
                Logger logger) {
            return new Pending<O,T,C>(connection, callback, Queues.<T>newConcurrentLinkedQueue(), logger);
        }
        protected final FutureCallback<? super T> callback;
        protected final C connection;
        protected Pending(
                C connection,
                FutureCallback<? super T> callback,
                Queue<T> mailbox,
                Logger logger) {
            super(connection, mailbox, logger);
            this.callback = callback;
            this.connection = connection;
        }
        @Override
        protected synchronized boolean doSend(T message) {
            // we use synchronized to preserve queue/send ordering
            // task needs to be in the queue before calling write
            if (! mailbox.offer(message)) {
                return false;
            }
            try {
                if (! message.isDone()) {
                    // mark pings as done on send because ZooKeeper doesn't care about their ordering
                    if (message.xid() == OpCodeXid.PING.xid()) {
                        message.set(null);
                    }
                    Message.ClientRequest<?> request = message.getRequest();
                    if (request != null) {
                        ListenableFuture<? extends Message.ClientRequest<?>> writeFuture = connection.write(request);
                        Futures.addCallback(writeFuture, message);
                    }
                }
            } catch (Throwable t) {
                message.onFailure(t);
            }
            // If the actor terminated while we were enqueueing, cancel and
            // flush the task immediately.
            if (state() == State.TERMINATED) {
                message.cancel(true);
                apply(message);
            }
            if (! message.isDone()) {
                message.addListener(this, executor);
            } else {
                run();
            }
            return true;
        }
        // Matches a server response to the oldest pending task with the same
        // xid; pings and notifications are never matched.
        @Override
        public void handleConnectionRead(Operation.Response message) {
            if (message instanceof Message.ServerResponse<?>) {
                int xid = ((Message.ServerResponse<?>) message).xid();
                if (! ((xid == OpCodeXid.PING.xid()) || (xid == OpCodeXid.NOTIFICATION.xid()))) {
                    Iterator<T> tasks = mailbox.iterator();
                    T task = null;
                    while (tasks.hasNext()) {
                        T next = tasks.next();
                        if ((next.xid() == xid) && !next.isDone()) {
                            task = next;
                            break;
                        }
                    }
                    if (task != null) {
                        task.set((Message.ServerResponse<?>) message);
                    } else if (state() != State.TERMINATED) {
                        // This could happen if someone submitted a message without
                        // going through us
                        // or, it could be a bug
                        logger.warn("{} xid doesn't match {} ({})", message, mailbox.peek(), this);
                    }
                }
            }
        }
        @Override
        public void handleConnectionState(Automaton.Transition<Connection.State> state) {
            // TODO
        }
        // Ready when the head of the queue has completed and can be flushed.
        public boolean isReady() {
            T next = mailbox.peek();
            return ((next != null) && next.isDone());
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this).addValue(callback).toString();
        }
        // Cancels and flushes every remaining task on shutdown.
        @Override
        protected void doStop() {
            T task;
            while ((task = mailbox.peek()) != null) {
                task.cancel(true);
                apply(task);
            }
        }
        // Removes a completed task from the queue and reports it; returns
        // false for tasks that are still outstanding.
        @Override
        protected synchronized boolean apply(T input) {
            if (input.isDone()) {
                if (mailbox.remove(input)) {
                    callback.onSuccess(input);
                    return true;
                }
            }
            return false;
        }
    }
    /**
     * Base actor for subclasses that forward queued request tasks; stopping it
     * also stops the enclosing executor and cancels everything still queued.
     */
    public abstract class ForwardingActor extends Actors.ExecutedQueuedActor<T> {
        protected ForwardingActor(Executor executor,
                Logger logger) {
            super(executor, new ConcurrentLinkedQueue<T>(), logger);
        }
        public Logger logger() {
            return logger;
        }
        @Override
        protected void doStop() {
            PendingQueueClientExecutor.this.doStop();
            T request;
            while ((request = mailbox.poll()) != null) {
                request.cancel(true);
            }
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this).addValue(PendingQueueClientExecutor.this).toString();
        }
    }
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.types.DfPrimitiveType;
import com.intellij.codeInspection.dataFlow.types.DfReferenceType;
import com.intellij.codeInspection.dataFlow.types.DfType;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.psi.*;
import com.intellij.psi.impl.compiled.ClsParameterImpl;
import com.intellij.psi.util.JavaElementKind;
import com.intellij.psi.util.TypeConversionUtil;
import com.siyeh.ig.psiutils.MethodCallUtils;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.OptionalInt;
import java.util.function.Function;
import java.util.function.UnaryOperator;
public abstract class ContractValue
{
// package private to avoid uncontrolled implementations
ContractValue()
{
    // No state; the nested subclasses in this file provide all behavior.
}
// Converts this contract value into a concrete DfaValue for the given call arguments.
abstract DfaValue makeDfaValue(DfaValueFactory factory, DfaCallArguments arguments);
// Default: a bare value does not describe a condition; Condition overrides this.
@Nonnull
DfaCondition makeCondition(DfaValueFactory factory, DfaCallArguments arguments)
{
    return DfaCondition.getUnknown();
}
/**
 * Builds the condition represented by this value for a concrete call site.
 *
 * @param factory value factory used to create DFA values
 * @param call    call expression to extract the arguments from
 * @return the resulting condition, or unknown when arguments cannot be extracted
 */
public DfaCondition fromCall(DfaValueFactory factory, PsiCallExpression call)
{
    DfaCallArguments extracted = DfaCallArguments.fromCall(factory, call);
    return extracted == null ? DfaCondition.getUnknown() : makeCondition(factory, extracted);
}
/**
 * @param other other contract condition
 * @return true if this contract condition and other condition cannot be fulfilled at the same time
 */
public boolean isExclusive(ContractValue other)
{
    // Default: no exclusivity information; TRUE/FALSE and Condition override this.
    return false;
}
/**
 * @return the logically inverted value, or {@code null} when inversion is not
 * supported (only Condition supports it).
 */
public ContractValue invert()
{
    return null;
}
/**
 * @return true if this contract value represents a bounds-checking condition
 */
public boolean isBoundCheckingCondition()
{
    // Default: not a bound check; Condition overrides based on its relation type.
    return false;
}
/**
 * Refines the call arguments assuming this condition holds (or its negation,
 * when {@code negated} is true). Default: no refinement.
 */
public DfaCallArguments updateArguments(DfaCallArguments arguments, boolean negated)
{
    return arguments;
}
/**
 * @return the index of the argument this condition compares to null
 * ({@code equalToNull} selects == vs !=), or empty when not a null check.
 */
public OptionalInt getNullCheckedArgument(boolean equalToNull)
{
    return getArgumentComparedTo(nullValue(), equalToNull);
}
// Default: this value compares nothing; Condition overrides this.
public OptionalInt getArgumentComparedTo(ContractValue value, boolean equal)
{
    return OptionalInt.empty();
}
// Applies the converter to the DfType of the argument this value designates,
// producing updated call arguments. Default: no argument is designated.
@Nonnull
DfaCallArguments fixArgument(@Nonnull DfaCallArguments arguments, @Nonnull UnaryOperator<DfType> converter)
{
    return arguments;
}
// Human-readable form for UI; subclasses may use the method's parameter names.
public String getPresentationText(PsiMethod method)
{
    return toString();
}
/**
 * @param call call to find the place in
 * @return the expression in the call that is the most relevant to the current value
 */
public PsiExpression findPlace(PsiCallExpression call)
{
    // Default: no specific place; Qualifier/Argument/Spec override this.
    return null;
}
/** @return the singleton value representing the call qualifier ({@code this}). */
public static ContractValue qualifier()
{
    return Qualifier.INSTANCE;
}
/** @return a value representing the zero-based call argument at {@code index}. */
public static ContractValue argument(int index)
{
    return new Argument(index);
}
/** @return a value representing the given special field (e.g. array length) of this value. */
public ContractValue specialField(@Nonnull SpecialField field)
{
    return new Spec(this, field);
}
/**
 * @return a value representing the given constant, cast to {@code type}
 * before being turned into a DFA constant.
 */
public static ContractValue constant(Object value, @Nonnull PsiType type)
{
    return new IndependentValue(String.valueOf(value), factory -> factory.getConstant(TypeConversionUtil.computeCastTo(value, type), type)
    );
}
/** @return the shared TRUE or FALSE constant for the given boolean. */
public static ContractValue booleanValue(boolean value)
{
    if(value)
    {
        return IndependentValue.TRUE;
    }
    return IndependentValue.FALSE;
}
/** @return the shared constant representing the {@code null} literal. */
public static ContractValue nullValue()
{
    return IndependentValue.NULL;
}
/** @return the shared constant representing the integer {@code 0}. */
public static ContractValue zero()
{
    return IndependentValue.ZERO;
}
/** @return a value representing {@code left <relation> right}. */
public static ContractValue condition(ContractValue left, RelationType relation, ContractValue right)
{
    return new Condition(left, relation, right);
}
/** Contract value referring to the call qualifier ({@code this} of the call). */
private static class Qualifier extends ContractValue
{
    static final Qualifier INSTANCE = new Qualifier();

    @Override
    DfaValue makeDfaValue(DfaValueFactory factory, DfaCallArguments arguments)
    {
        return arguments.myQualifier;
    }

    @Override
    public PsiExpression findPlace(PsiCallExpression call)
    {
        // Only method calls expose a qualifier expression; constructor calls do not.
        if(call instanceof PsiMethodCallExpression)
        {
            return ((PsiMethodCallExpression) call).getMethodExpression().getQualifierExpression();
        }
        return null;
    }

    @Override
    @Nonnull
    DfaCallArguments fixArgument(@Nonnull DfaCallArguments arguments, @Nonnull UnaryOperator<DfType> converter)
    {
        // Refine the qualifier's type; a new arguments object is built only
        // when the converter actually changed the type.
        if(arguments.myQualifier instanceof DfaTypeValue)
        {
            DfType type = arguments.myQualifier.getDfType();
            DfType newType = converter.apply(type);
            if(!type.equals(newType))
            {
                return new DfaCallArguments(arguments.myQualifier.getFactory().fromDfType(newType), arguments.myArguments, arguments.myMutation);
            }
        }
        return arguments;
    }

    @Override
    public String toString()
    {
        return "this";
    }
}
/** Contract value referring to a call argument by zero-based index. */
private static final class Argument extends ContractValue
{
    /** Zero-based index of the call argument this value refers to. */
    private final int myIndex;

    Argument(int index)
    {
        myIndex = index;
    }

    @Override
    DfaValue makeDfaValue(DfaValueFactory factory, DfaCallArguments arguments)
    {
        // Out-of-range index (e.g. fewer actual than formal arguments): unknown.
        if(arguments.myArguments.length <= myIndex)
        {
            return factory.getUnknown();
        }
        return arguments.myArguments[myIndex];
    }

    @Override
    public PsiExpression findPlace(PsiCallExpression call)
    {
        PsiExpressionList list = call.getArgumentList();
        if(list != null)
        {
            PsiExpression[] args = list.getExpressions();
            // The last argument of a vararg call is ambiguous (it may be one of
            // several array elements), so it is only returned for non-vararg calls.
            if(myIndex < args.length - 1 || (myIndex == args.length - 1 && !MethodCallUtils.isVarArgCall(call)))
            {
                return args[myIndex];
            }
        }
        return null;
    }

    @Override
    public String getPresentationText(PsiMethod method)
    {
        PsiParameter[] params = method.getParameterList().getParameters();
        // A single parameter is referred to generically (e.g. "parameter").
        if(myIndex == 0 && params.length == 1)
        {
            return JavaElementKind.PARAMETER.subject();
        }
        if(myIndex < params.length)
        {
            PsiParameter param = params[myIndex];
            // Compiled parameters may carry auto-generated names; fall back to a stable label.
            if(param instanceof ClsParameterImpl && ((ClsParameterImpl) param).isAutoGeneratedName())
            {
                return "param" + (myIndex + 1);
            }
            return param.getName();
        }
        return toString();
    }

    @Override
    @Nonnull
    DfaCallArguments fixArgument(@Nonnull DfaCallArguments arguments, @Nonnull UnaryOperator<DfType> converter)
    {
        if(arguments.myArguments != null && arguments.myArguments.length > myIndex)
        {
            DfaValue value = arguments.myArguments[myIndex];
            if(value instanceof DfaTypeValue)
            {
                DfType type = value.getDfType();
                DfType newType = converter.apply(type);
                // Clone the argument array only when the type actually changed.
                if(!type.equals(newType))
                {
                    DfaValue[] clone = arguments.myArguments.clone();
                    clone[myIndex] = value.getFactory().fromDfType(newType);
                    return new DfaCallArguments(arguments.myQualifier, clone, arguments.myMutation);
                }
            }
        }
        return arguments;
    }

    @Override
    public boolean equals(Object obj)
    {
        return obj == this || (obj instanceof Argument && myIndex == ((Argument) obj).myIndex);
    }

    @Override
    public int hashCode()
    {
        // Fix: equals() was overridden without hashCode(), violating the
        // Object contract and breaking use in hash-based collections.
        return myIndex;
    }

    @Override
    public String toString()
    {
        return "param" + (myIndex + 1);
    }
}
/**
 * Contract value that does not depend on the call arguments (a constant),
 * produced lazily from the value factory. Shared singletons exist for
 * null/true/false/0, so identity comparison against them is meaningful.
 */
private static class IndependentValue extends ContractValue
{
    static final IndependentValue NULL = new IndependentValue("null", factory -> factory.getNull());
    // TRUE and FALSE are mutually exclusive with each other.
    static final IndependentValue TRUE = new IndependentValue("true", factory -> factory.getBoolean(true))
    {
        @Override
        public boolean isExclusive(ContractValue other)
        {
            return other == FALSE;
        }
    };
    static final IndependentValue FALSE = new IndependentValue("false", factory -> factory.getBoolean(false))
    {
        @Override
        public boolean isExclusive(ContractValue other)
        {
            return other == TRUE;
        }
    };
    static final IndependentValue ZERO = new IndependentValue("0", factory -> factory.getInt(0));

    // Produces the DFA value for a given factory.
    private final Function<? super DfaValueFactory, ? extends DfaValue> mySupplier;
    // Human-readable form used by toString().
    private final String myPresentation;

    IndependentValue(String presentation, Function<? super DfaValueFactory, ? extends DfaValue> supplier)
    {
        mySupplier = supplier;
        myPresentation = presentation;
    }

    @Override
    DfaValue makeDfaValue(DfaValueFactory factory, DfaCallArguments arguments)
    {
        return mySupplier.apply(factory);
    }

    @Override
    public String toString()
    {
        return myPresentation;
    }
}
/**
 * Contract value referring to a special field (e.g. array length, collection
 * size) of another contract value.
 */
private static final class Spec extends ContractValue
{
    private final @Nonnull ContractValue myQualifier;
    private final @Nonnull SpecialField myField;

    Spec(@Nonnull ContractValue qualifier, @Nonnull SpecialField field)
    {
        myQualifier = qualifier;
        myField = field;
    }

    @Override
    DfaValue makeDfaValue(DfaValueFactory factory, DfaCallArguments arguments)
    {
        return myField.createValue(factory, myQualifier.makeDfaValue(factory, arguments));
    }

    @Override
    public boolean equals(Object obj)
    {
        if(obj == this)
        {
            return true;
        }
        if(!(obj instanceof Spec))
        {
            return false;
        }
        Spec that = (Spec) obj;
        return myQualifier.equals(that.myQualifier) && myField == that.myField;
    }

    @Override
    public int hashCode()
    {
        // Fix: equals() was overridden without hashCode(), violating the
        // Object contract and breaking use in hash-based collections.
        return myQualifier.hashCode() * 31 + myField.hashCode();
    }

    @Override
    @Nonnull
    DfaCallArguments fixArgument(@Nonnull DfaCallArguments arguments, @Nonnull UnaryOperator<DfType> converter)
    {
        // Delegate to the qualifier, converting only the special-field component
        // of its reference type and leaving everything else intact.
        return myQualifier.fixArgument(arguments, t -> {
            if(!(t instanceof DfReferenceType))
            {
                return t;
            }
            DfType sfType = myField.getFromQualifier(t);
            DfType newType = converter.apply(sfType);
            return newType.equals(sfType) ? t : ((DfReferenceType) t).dropSpecialField().meet(myField.asDfType(newType));
        });
    }

    @Override
    public PsiExpression findPlace(PsiCallExpression call)
    {
        return myQualifier.findPlace(call);
    }

    @Override
    public String getPresentationText(PsiMethod method)
    {
        // Array length is rendered as a field access; other special fields as a call.
        return myQualifier.getPresentationText(method) + "." + myField + (myField == SpecialField.ARRAY_LENGTH ? "" : "()");
    }

    @Override
    public String toString()
    {
        return myQualifier + "." + myField + "()";
    }
}
/**
 * A contract value that represents a relation between two other values
 */
public static class Condition extends ContractValue
{
    private final ContractValue myLeft, myRight;
    private final RelationType myRelationType;

    Condition(ContractValue left, RelationType type, ContractValue right)
    {
        myLeft = left;
        myRight = right;
        myRelationType = type;
    }

    @Override
    public boolean isBoundCheckingCondition()
    {
        // Only ordering relations (<, <=, >, >=) are bound checks; ==/!= are not.
        switch(myRelationType)
        {
            case LE:
            case LT:
            case GE:
            case GT:
                return true;
            default:
                return false;
        }
    }

    @Override
    public boolean isExclusive(ContractValue other)
    {
        if(!(other instanceof Condition))
        {
            return false;
        }
        Condition that = (Condition) other;
        // Same operands, negated relation: e.g. "a == b" vs "a != b".
        if(that.myLeft.equals(myLeft) && that.myRight.equals(myRight) && that.myRelationType.getNegated() == myRelationType)
        {
            return true;
        }
        // Swapped operands with the flipped-then-negated relation.
        if(that.myLeft.equals(myRight) && that.myRight.equals(myLeft) && that.myRelationType.getNegated() == myRelationType.getFlipped())
        {
            return true;
        }
        // Same relation but one operand pair is itself exclusive (e.g. true vs false).
        if(that.myRelationType == myRelationType)
        {
            if(that.myLeft.equals(myLeft) && that.myRight.isExclusive(myRight))
            {
                return true;
            }
            if(that.myLeft.equals(myRight) && that.myRight.isExclusive(myLeft))
            {
                return true;
            }
        }
        return false;
    }

    @Override
    public DfaCallArguments updateArguments(DfaCallArguments arguments, boolean negated)
    {
        // If this condition asserts (non-)nullness of some operand, narrow that
        // operand's type accordingly.
        ContractValue target = getValueComparedTo(nullValue(), negated);
        if(target != null)
        {
            return target.fixArgument(arguments, dfType -> dfType.meet(DfaNullability.NOT_NULL.asDfType()));
        }
        target = getValueComparedTo(nullValue(), !negated);
        if(target != null)
        {
            return target.fixArgument(arguments, dfType -> dfType.meet(DfaNullability.NULL.asDfType()));
        }
        return arguments;
    }

    // Returns the operand compared to {@code value} with the given polarity,
    // or null when this condition is not such a comparison. Note: operands are
    // compared by identity, which works because the interesting values
    // (null/true/false/0) are shared singletons.
    private
    @Nullable
    ContractValue getValueComparedTo(ContractValue value, boolean equal)
    {
        if(myRelationType == RelationType.equivalence(equal))
        {
            ContractValue other;
            if(myLeft == value)
            {
                other = myRight;
            }
            else if(myRight == value)
            {
                other = myLeft;
            }
            else
            {
                return null;
            }
            return other;
        }
        // "x == false" is rewritten as "x != true" and retried.
        if(value == IndependentValue.FALSE)
        {
            return getValueComparedTo(IndependentValue.TRUE, !equal);
        }
        return null;
    }

    @Override
    public OptionalInt getArgumentComparedTo(ContractValue value, boolean equal)
    {
        ContractValue other = getValueComparedTo(value, equal);
        return other instanceof Argument ? OptionalInt.of(((Argument) other).myIndex) : OptionalInt.empty();
    }

    @Override
    DfaValue makeDfaValue(DfaValueFactory factory, DfaCallArguments arguments)
    {
        // A relation has no single-value representation.
        return factory.getUnknown();
    }

    @Nonnull
    @Override
    DfaCondition makeCondition(DfaValueFactory factory, DfaCallArguments arguments)
    {
        DfaValue left = myLeft.makeDfaValue(factory, arguments);
        DfaValue right = myRight.makeDfaValue(factory, arguments);
        // Unbox/box operands so both sides have compatible primitive-ness.
        if(left.getDfType() instanceof DfPrimitiveType)
        {
            right = DfaUtil.boxUnbox(right, left.getType());
        }
        if(right.getDfType() instanceof DfPrimitiveType)
        {
            left = DfaUtil.boxUnbox(left, right.getType());
        }
        return left.cond(myRelationType, right);
    }

    @Override
    public String getPresentationText(PsiMethod method)
    {
        // Put the constant on the right for readability (e.g. "x > 0", not "0 < x").
        if(myLeft instanceof IndependentValue)
        {
            return myRight.getPresentationText(method) + " " + myRelationType.getFlipped() + " " + myLeft.getPresentationText(method);
        }
        return myLeft.getPresentationText(method) + " " + myRelationType + " " + myRight.getPresentationText(method);
    }

    /**
     * @return condition relation type
     */
    public
    @Nonnull
    RelationType getRelationType()
    {
        return myRelationType;
    }

    /**
     * @return condition left operand
     */
    public
    @Nonnull
    ContractValue getLeft()
    {
        return myLeft;
    }

    /**
     * @return condition right operand
     */
    public
    @Nonnull
    ContractValue getRight()
    {
        return myRight;
    }

    @Override
    public ContractValue invert()
    {
        return new Condition(myLeft, myRelationType.getNegated(), myRight);
    }

    @Override
    public String toString()
    {
        return myLeft + " " + myRelationType + " " + myRight;
    }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.upgrade;
import static org.apache.jackrabbit.JcrConstants.JCR_PREDECESSORS;
import static org.apache.jackrabbit.JcrConstants.JCR_VERSIONHISTORY;
import static org.apache.jackrabbit.JcrConstants.MIX_VERSIONABLE;
import static org.apache.jackrabbit.oak.spi.version.VersionConstants.MIX_REP_VERSIONABLE_PATHS;
import static org.apache.jackrabbit.oak.upgrade.util.VersionCopyTestUtils.createLabeledVersions;
import static org.apache.jackrabbit.oak.upgrade.util.VersionCopyTestUtils.getOrAddNodeWithMixins;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.PropertyType;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.version.Version;
import javax.jcr.version.VersionHistory;
import javax.jcr.version.VersionManager;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.jackrabbit.core.RepositoryContext;
import org.apache.jackrabbit.core.config.RepositoryConfig;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.jcr.Jcr;
import org.apache.jackrabbit.oak.jcr.repository.RepositoryImpl;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.memory.MemoryStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.upgrade.util.VersionCopyTestUtils;
import org.apache.jackrabbit.oak.upgrade.util.VersionCopyTestUtils.VersionCopySetup;
import org.apache.jackrabbit.oak.upgrade.version.VersionCopyConfiguration;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
public class CopyVersionHistoryTest extends AbstractRepositoryUpgradeTest {
private static final String VERSIONABLES_PATH_PREFIX = "/versionables/";
// Node created before "betweenHistories" and kept referenced.
private static final String VERSIONABLES_OLD = "old";
// Node created before "betweenHistories" and then deleted (orphaned history).
private static final String VERSIONABLES_OLD_ORPHANED = "oldOrphaned";
// Node created after "betweenHistories" and kept referenced.
private static final String VERSIONABLES_YOUNG = "young";
// Node created after "betweenHistories" and then deleted (orphaned history).
private static final String VERSIONABLES_YOUNG_ORPHANED = "youngOrphaned";
protected RepositoryImpl repository;
// Sessions opened by tests; logged out in closeRepository().
protected List<Session> sessions = Lists.newArrayList();
// Timestamp taken between creating the "old" and "young" versionables.
private static Calendar betweenHistories;
// Maps versionable node path -> path of its version history in the source repo.
private static Map<String, String> pathToVersionHistory = Maps.newHashMap();
/**
 * Home directory of source repository.
 */
private static File source;
private static String[] MIXINS;
@Override
protected void createSourceContent(Session session) throws Exception {
    // Exercise both simple and full versioning when the repository supports it.
    if (hasSimpleVersioningSupport(session.getRepository())) {
        MIXINS = new String[] { "mix:simpleVersionable", MIX_VERSIONABLE };
    } else {
        MIXINS = new String[] { MIX_VERSIONABLE };
    }
    final Node root = session.getRootNode();
    // Phase 1: "old" versionables, created before the betweenHistories timestamp.
    for (final String mixinType : MIXINS) {
        final Node parent = VersionCopyTestUtils.getOrAddNode(root, rel(VERSIONABLES_PATH_PREFIX + mixinType));
        final Node oldNode = getOrAddNodeWithMixins(parent, VERSIONABLES_OLD, mixinType);
        pathToVersionHistory.put(oldNode.getPath(), createLabeledVersions(oldNode));
        final Node oldOrphanNode = getOrAddNodeWithMixins(parent, VERSIONABLES_OLD_ORPHANED, mixinType);
        pathToVersionHistory.put(oldOrphanNode.getPath(), createLabeledVersions(oldOrphanNode));
    }
    // Sleeps ensure the timestamp strictly separates the two creation phases.
    Thread.sleep(10);
    betweenHistories = Calendar.getInstance();
    Thread.sleep(10);
    // Phase 2: "young" versionables, created after the timestamp.
    for (final String mixinType : MIXINS) {
        final Node parent = VersionCopyTestUtils.getOrAddNode(root, rel(VERSIONABLES_PATH_PREFIX + mixinType));
        final Node youngNode = getOrAddNodeWithMixins(parent, VERSIONABLES_YOUNG, mixinType);
        pathToVersionHistory.put(youngNode.getPath(), createLabeledVersions(youngNode));
        final Node youngOrphanNode = getOrAddNodeWithMixins(parent, VERSIONABLES_YOUNG_ORPHANED, mixinType);
        pathToVersionHistory.put(youngOrphanNode.getPath(), createLabeledVersions(youngOrphanNode));
        // create orphaned version histories by deleting the original nodes
        parent.getNode(VERSIONABLES_OLD_ORPHANED).remove();
        parent.getNode(VERSIONABLES_YOUNG_ORPHANED).remove();
    }
    session.save();
}
/** @return whether the repository advertises simple-versioning support. */
private boolean hasSimpleVersioningSupport(final Repository repository) {
    final String descriptor = repository.getDescriptor(Repository.OPTION_SIMPLE_VERSIONING_SUPPORTED);
    return Boolean.parseBoolean(descriptor);
}
@Override
protected void doUpgradeRepository(File source, NodeStore target) throws RepositoryException {
    // abuse this method to capture the source repo directory
    // (the actual migration is performed later by migrate(), per test).
    CopyVersionHistoryTest.source = source;
}
/** Releases static state so it cannot leak into other test classes. */
@AfterClass
public static void teardown() {
    source = null;
    pathToVersionHistory.clear();
}
@Test
public void copyAllVersions() throws RepositoryException, IOException {
    // Default configuration: all version histories, referenced and orphaned, are copied.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            // copying all versions is enabled by default
        }
    });
    assertVersionableProperties(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertExistingHistories(session,
            VERSIONABLES_OLD, VERSIONABLES_OLD_ORPHANED, VERSIONABLES_YOUNG, VERSIONABLES_YOUNG_ORPHANED);
    assertVersionablePaths(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertVersionsCanBeRestored(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
}
@Test
public void referencedSinceDate() throws RepositoryException, IOException {
    // Only histories modified after betweenHistories are copied: the "young" ones.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyVersions(betweenHistories);
        }
    });
    assertVersionableProperties(session, VERSIONABLES_YOUNG);
    assertExistingHistories(session, VERSIONABLES_YOUNG, VERSIONABLES_YOUNG_ORPHANED);
    assertVersionablePaths(session, VERSIONABLES_YOUNG);
    assertMissingHistories(session, VERSIONABLES_OLD, VERSIONABLES_OLD_ORPHANED);
    assertVersionsCanBeRestored(session, VERSIONABLES_YOUNG);
}
@Test
public void referencedOlderThanOrphaned() throws RepositoryException, IOException {
    // All referenced histories are copied, but orphaned ones only after the cut-off.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyOrphanedVersions(betweenHistories);
        }
    });
    assertVersionableProperties(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertExistingHistories(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG, VERSIONABLES_YOUNG_ORPHANED);
    assertVersionablePaths(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertMissingHistories(session, VERSIONABLES_OLD_ORPHANED);
    assertVersionsCanBeRestored(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
}
@Test
public void onlyReferenced() throws RepositoryException, IOException {
    // Orphaned-version copying disabled: only referenced histories survive.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyOrphanedVersions(null);
        }
    });
    assertVersionableProperties(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertExistingHistories(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    // Fix: removed stray empty statement (";;") after this call.
    assertVersionablePaths(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertMissingHistories(session, VERSIONABLES_OLD_ORPHANED, VERSIONABLES_YOUNG_ORPHANED);
    assertVersionsCanBeRestored(session, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
}
@Test
public void onlyReferencedAfterDate() throws RepositoryException, IOException {
    // Combination: referenced histories after the cut-off only, no orphaned ones.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyVersions(betweenHistories);
            config.setCopyOrphanedVersions(null);
        }
    });
    assertVersionableProperties(session, VERSIONABLES_YOUNG);
    assertExistingHistories(session, VERSIONABLES_YOUNG);
    assertVersionablePaths(session, VERSIONABLES_YOUNG);
    assertMissingHistories(session, VERSIONABLES_OLD, VERSIONABLES_OLD_ORPHANED, VERSIONABLES_YOUNG_ORPHANED);
    assertVersionsCanBeRestored(session, VERSIONABLES_YOUNG);
}
@Test
public void overrideOrphaned() throws RepositoryException, IOException {
    // Disabling referenced-version copying overrides the orphaned setting:
    // nothing is copied at all.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyVersions(null);
            config.setCopyOrphanedVersions(betweenHistories);
        }
    });
    assertMissingHistories(session,
            VERSIONABLES_OLD, VERSIONABLES_OLD_ORPHANED, VERSIONABLES_YOUNG, VERSIONABLES_YOUNG_ORPHANED);
}
@Test
public void dontCopyVersionHistory() throws RepositoryException, IOException {
    // With all version copying disabled, no histories exist in the target,
    // but the version storage root node itself must still be present.
    Session session = performCopy(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyVersions(null);
            config.setCopyOrphanedVersions(null);
        }
    });
    assertMissingHistories(session,
            VERSIONABLES_OLD, VERSIONABLES_OLD_ORPHANED, VERSIONABLES_YOUNG, VERSIONABLES_YOUNG_ORPHANED);
    assertNotNull(session.getNode("/jcr:system/jcr:versionStorage")
            .getPrimaryNodeType());
}
@Test
public void removeVersionHistory() throws RepositoryException, IOException {
    // Two-phase migration into the same target: first copy everything, then
    // re-run over "/versionables" with version copying disabled. The second
    // run is expected to remove the previously copied histories.
    final NodeStore targetNodeStore = SegmentNodeStoreBuilders.builder(new MemoryStore()).build();
    migrate(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
        }
    }, targetNodeStore, PathUtils.ROOT_PATH);
    migrate(new VersionCopySetup() {
        @Override
        public void setup(VersionCopyConfiguration config) {
            config.setCopyVersions(null);
            config.setCopyOrphanedVersions(null);
        }
    }, targetNodeStore, "/versionables");
    repository = (RepositoryImpl) new Jcr(new Oak(targetNodeStore)).createRepository();
    Session s = repository.login(AbstractRepositoryUpgradeTest.CREDENTIALS);
    sessions.add(s);
    assertMissingHistories(s, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
    assertNonVersionablePaths(s, VERSIONABLES_OLD, VERSIONABLES_YOUNG);
}
/**
 * Migrates the captured source repository into a fresh in-memory target with
 * the given version-copy configuration and returns a logged-in session on it.
 * The session is registered for cleanup in closeRepository().
 */
protected Session performCopy(VersionCopySetup setup) throws RepositoryException, IOException {
    final NodeStore targetNodeStore = SegmentNodeStoreBuilders.builder(new MemoryStore()).build();
    migrate(setup, targetNodeStore, PathUtils.ROOT_PATH);
    repository = (RepositoryImpl) new Jcr(new Oak(targetNodeStore)).createRepository();
    Session s = repository.login(AbstractRepositoryUpgradeTest.CREDENTIALS);
    sessions.add(s);
    return s;
}
/**
 * Runs a RepositoryUpgrade from the captured Jackrabbit 2 source into the
 * given target node store, restricted to {@code includePath}, after letting
 * the setup callback tweak the version-copy configuration. The source
 * repository is always shut down afterwards.
 */
protected void migrate(VersionCopySetup setup, NodeStore target, String includePath) throws RepositoryException, IOException {
    final RepositoryConfig sourceConfig = RepositoryConfig.create(source);
    final RepositoryContext sourceContext = RepositoryContext.create(sourceConfig);
    try {
        final RepositoryUpgrade upgrade = new RepositoryUpgrade(sourceContext, target);
        upgrade.setIncludes(includePath);
        setup.setup(upgrade.versionCopyConfiguration);
        upgrade.setEarlyShutdown(false);
        upgrade.copy(null);
    } finally {
        sourceContext.getRepository().shutdown();
    }
}
/** Logs out all test sessions and shuts the per-test repository down. */
@After
public void closeRepository() {
    for (Session s : sessions) {
        s.logout();
    }
    sessions.clear();
    // Fix: guard against NPE when a test failed before performCopy() created
    // the repository; an NPE here would mask the original test failure.
    if (repository != null) {
        repository.shutdown();
        repository = null;
    }
}
/** Strips a leading slash so the path can be used relative to the root node. */
private static String rel(final String path) {
    return path.startsWith("/") ? path.substring(1) : path;
}
/**
 * Looks up the version history node recorded for a versionable path, or
 * returns null when it was not copied to the target repository.
 * NOTE(review): assumes pathToVersionHistory contains every path passed in;
 * a missing entry would NPE inside rel() — confirm against callers.
 */
private static VersionHistory getVersionHistoryForPath(Session session, String path)
        throws RepositoryException {
    final Node root = session.getRootNode();
    if (root.hasNode(rel(pathToVersionHistory.get(path)))) {
        return (VersionHistory)session.getNode(pathToVersionHistory.get(path));
    }
    return null;
}
/**
 * Asserts that each named versionable has consistent versioning properties:
 * its jcr:versionHistory points at the recorded history, the base version is
 * "1.2", and jcr:predecessors contains exactly the base version.
 */
private static void assertVersionableProperties(final Session session, final String... names) throws RepositoryException {
    VersionManager vMgr = session.getWorkspace().getVersionManager();
    for (final String mixin : MIXINS) {
        final String pathPrefix = VERSIONABLES_PATH_PREFIX + mixin + "/";
        for (final String name : names) {
            final String path = pathPrefix + name;
            Node versionable = session.getNode(path);
            String versionHistoryUuid = versionable.getProperty(JCR_VERSIONHISTORY).getString();
            assertEquals(getVersionHistoryForPath(session, path).getIdentifier(), versionHistoryUuid);
            final Version baseVersion = vMgr.getBaseVersion(path);
            assertEquals("1.2", baseVersion.getName());
            final Value[] predecessors = versionable.getProperty(JCR_PREDECESSORS).getValues();
            assertEquals(1, predecessors.length);
            assertEquals(baseVersion.getIdentifier(), predecessors[0].getString());
        }
    }
}
/** Asserts that a labeled version history exists for each named versionable. */
private static void assertExistingHistories(final Session session, final String... names)
        throws RepositoryException {
    for (final String mixin : MIXINS) {
        final String pathPrefix = VERSIONABLES_PATH_PREFIX + mixin + "/";
        for (final String name : names) {
            final String path = pathPrefix + name;
            final VersionHistory history = getVersionHistoryForPath(session, path);
            assertNotNull("No history found for " + path, history);
            VersionCopyTestUtils.assertLabeledVersions(history);
        }
    }
}
/** Asserts that no version history was copied for each named versionable. */
private static void assertMissingHistories(final Session session, final String... names)
        throws RepositoryException {
    for (final String mixin : MIXINS) {
        final String pathPrefix = VERSIONABLES_PATH_PREFIX + mixin + "/";
        for (final String name : names) {
            final String path = pathPrefix + name;
            final VersionHistory history = getVersionHistoryForPath(session, path);
            assertNull("Should not have found history for " + path, history);
        }
    }
}
/**
 * Asserts that each named node is mix:versionable and that its history
 * records the versionable path for this workspace.
 */
private static void assertVersionablePaths(final Session session, final String... names)
        throws RepositoryException {
    for (final String mixin : MIXINS) {
        final String pathPrefix = VERSIONABLES_PATH_PREFIX + mixin + "/";
        for (final String name : names) {
            final String path = pathPrefix + name;
            final Node node = session.getNode(path);
            assertTrue("Node " + path + " should have mix:versionable mixin", node.isNodeType(MIX_VERSIONABLE));
            final VersionHistory history = getVersionHistoryForPath(session, path);
            assertVersionablePath(history, path);
        }
    }
}
/** Asserts that each named node lost its mix:versionable mixin in the target. */
private static void assertNonVersionablePaths(final Session session, final String... names)
        throws RepositoryException {
    for (final String mixin : MIXINS) {
        final String pathPrefix = VERSIONABLES_PATH_PREFIX + mixin + "/";
        for (final String name : names) {
            final String path = pathPrefix + name;
            final Node node = session.getNode(path);
            assertFalse("Node " + path + " shouldn't have mix:versionable mixin", node.isNodeType(MIX_VERSIONABLE));
        }
    }
}
/**
 * Asserts that the history carries the rep:VersionablePaths mixin and a
 * PATH property, keyed by workspace name, pointing back at the versionable.
 */
private static void assertVersionablePath(final VersionHistory history, final String versionablePath)
        throws RepositoryException {
    final String workspaceName = history.getSession().getWorkspace().getName();
    assertTrue(history.isNodeType(MIX_REP_VERSIONABLE_PATHS));
    assertTrue(history.hasProperty(workspaceName));
    final Property pathProperty = history.getProperty(workspaceName);
    assertEquals(PropertyType.PATH, pathProperty.getType());
    assertEquals(versionablePath, pathProperty.getString());
}
/**
 * Restores version "1.0" for each named versionable and checks that the
 * restored node's properties (version, history UUID, base version,
 * predecessors, checked-in state) match the restored state. Afterwards the
 * paths must still be versionable.
 */
private static void assertVersionsCanBeRestored(final Session session, final String... names) throws RepositoryException {
    VersionManager vMgr = session.getWorkspace().getVersionManager();
    for (final String mixin : MIXINS) {
        final String pathPrefix = VERSIONABLES_PATH_PREFIX + mixin + "/";
        for (final String name : names) {
            final String path = pathPrefix + name;
            VersionHistory history = vMgr.getVersionHistory(path);
            assertEquals("1.2", session.getNode(path).getProperty("version").getString());
            vMgr.restore(history.getVersion("1.0"), false);
            Node versionable = session.getNode(path);
            assertEquals("1.0", versionable.getProperty("version").getString());
            // restored node should have correct properties
            String versionHistoryUuid = versionable.getProperty(JCR_VERSIONHISTORY).getString();
            assertEquals(history.getIdentifier(), versionHistoryUuid);
            final Version baseVersion = vMgr.getBaseVersion(path);
            assertEquals("1.0", baseVersion.getName());
            final Value[] predecessors = versionable.getProperty(JCR_PREDECESSORS).getValues();
            assertEquals(0, predecessors.length);
            assertFalse(vMgr.isCheckedOut(path));
        }
    }
    // after restoring, the paths should be still versionable
    assertVersionablePaths(session, names);
}
}
|
|
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.rabbit;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.zip.Deflater;
import org.aopalliance.aop.Advice;
import org.apache.commons.logging.Log;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.amqp.core.AcknowledgeMode;
import org.springframework.amqp.core.MessageDeliveryMode;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer;
import org.springframework.amqp.support.AmqpHeaders;
import org.springframework.amqp.support.postprocessor.DelegatingDecompressingPostProcessor;
import org.springframework.amqp.utils.test.TestUtils;
import org.springframework.beans.DirectFieldAccessor;
import org.springframework.cloud.stream.binder.AbstractTestBinder;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.BinderPropertyKeys;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.PartitionCapableBinderTests;
import org.springframework.cloud.stream.binder.Spy;
import org.springframework.cloud.stream.test.junit.rabbit.RabbitTestSupport;
import org.springframework.context.ApplicationContext;
import org.springframework.expression.spel.standard.SpelExpression;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.endpoint.AbstractEndpoint;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.MessagingException;
import org.springframework.messaging.support.GenericMessage;
/**
* @author Mark Fisher
* @author Gary Russell
* @author David Turanski
*/
public class RabbitBinderTests extends PartitionCapableBinderTests {

    // Simple class name reported to the base test framework (see getClassUnderTestName()).
    private final String CLASS_UNDER_TEST_NAME = RabbitMessageChannelBinder.class.getSimpleName();

    // Prefix applied to broker entities created by these tests so they are
    // recognizable (and cleanable) on a shared RabbitMQ broker.
    public static final String TEST_PREFIX = "bindertest.";

    // JUnit rule: skips these tests when no local RabbitMQ broker is reachable.
    @Rule
    public RabbitTestSupport rabbitAvailableRule = new RabbitTestSupport();

    @Override
    protected Binder<MessageChannel> getBinder() {
        // Lazily create one shared binder backed by the rule's connection factory.
        if (testBinder == null) {
            testBinder = new RabbitTestBinder(rabbitAvailableRule.getResource());
        }
        return testBinder;
    }

    @Override
    protected boolean usesExplicitRouting() {
        return true;
    }

    /**
     * A consumer that always throws should still see redeliveries: with the
     * default retry advice the handler is invoked multiple times (latch of 3).
     */
    @Test
    public void testSendAndReceiveBad() throws Exception {
        Binder<MessageChannel> binder = getBinder();
        DirectChannel moduleOutputChannel = new DirectChannel();
        DirectChannel moduleInputChannel = new DirectChannel();
        Binding<MessageChannel> producerBinding = binder.bindProducer("bad.0", moduleOutputChannel, null);
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("bad.0", "test", moduleInputChannel, null);
        Message<?> message = MessageBuilder.withPayload("bad").setHeader(MessageHeaders.CONTENT_TYPE,
                "foo/bar").build();
        final CountDownLatch latch = new CountDownLatch(3);
        moduleInputChannel.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                latch.countDown();
                // Force a failure so the message is retried/redelivered.
                throw new RuntimeException("bad");
            }

        });
        moduleOutputChannel.send(message);
        assertTrue(latch.await(10, TimeUnit.SECONDS));
        binder.unbind(consumerBinding);
        binder.unbind(producerBinding);
    }

    /**
     * Verifies consumer property defaults (first binding) and then that every
     * documented consumer property is applied to the listener container
     * (second binding, checked via verifyContainer()).
     */
    @Test
    public void testConsumerProperties() throws Exception {
        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("transacted", "true"); // test transacted with defaults; not allowed with ackmode NONE
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("props.0", null, new DirectChannel(), properties);
        @SuppressWarnings("unchecked")
        List<Binding<MessageChannel>> bindings = TestUtils.getPropertyValue(binder, "binder.bindings", List.class);
        assertEquals(1, bindings.size());
        AbstractEndpoint endpoint = bindings.get(0).getEndpoint();
        SimpleMessageListenerContainer container = TestUtils.getPropertyValue(endpoint, "messageListenerContainer",
                SimpleMessageListenerContainer.class);
        // Defaults: AUTO ack, default prefix, single consumer, retry 3x with
        // 1s..10s exponential backoff (multiplier 2.0).
        assertEquals(AcknowledgeMode.AUTO, container.getAcknowledgeMode());
        assertThat(container.getQueueNames()[0],
                startsWith(RabbitMessageChannelBinder.DEFAULT_RABBIT_PREFIX));
        assertTrue(TestUtils.getPropertyValue(container, "transactional", Boolean.class));
        assertEquals(1, TestUtils.getPropertyValue(container, "concurrentConsumers"));
        assertNull(TestUtils.getPropertyValue(container, "maxConcurrentConsumers"));
        assertTrue(TestUtils.getPropertyValue(container, "defaultRequeueRejected", Boolean.class));
        assertEquals(1, TestUtils.getPropertyValue(container, "prefetchCount"));
        assertEquals(1, TestUtils.getPropertyValue(container, "txSize"));
        Advice retry = TestUtils.getPropertyValue(container, "adviceChain", Advice[].class)[0];
        assertEquals(3, TestUtils.getPropertyValue(retry, "retryOperations.retryPolicy.maxAttempts"));
        assertEquals(1000L, TestUtils.getPropertyValue(retry, "retryOperations.backOffPolicy.initialInterval"));
        assertEquals(10000L, TestUtils.getPropertyValue(retry, "retryOperations.backOffPolicy.maxInterval"));
        assertEquals(2.0, TestUtils.getPropertyValue(retry, "retryOperations.backOffPolicy.multiplier"));
        binder.unbind(consumerBinding);
        assertEquals(0, bindings.size());
        // Second pass: override every consumer property and verify each took effect.
        properties = new Properties();
        properties.put("ackMode", "NONE");
        properties.put("backOffInitialInterval", "2000");
        properties.put("backOffMaxInterval", "20000");
        properties.put("backOffMultiplier", "5.0");
        properties.put("concurrency", "2");
        properties.put("maxAttempts", "23");
        properties.put("maxConcurrency", "3");
        properties.put("prefix", "foo.");
        properties.put("prefetch", "20");
        properties.put("requestHeaderPatterns", "foo");
        properties.put("requeue", "false");
        properties.put("txSize", "10");
        properties.put("partitionIndex", 0);
        consumerBinding = binder.bindConsumer("props.0", "test", new DirectChannel(), properties);
        @SuppressWarnings("unchecked")
        List<Binding<MessageChannel>> bindingsNow = TestUtils.getPropertyValue(binder, "binder.bindings", List.class);
        assertEquals(1, bindingsNow.size());
        endpoint = bindingsNow.get(0).getEndpoint();
        container = verifyContainer(endpoint);
        // Queue name is prefix + destination + "." + group.
        assertEquals("foo.props.0.test", container.getQueueNames()[0]);
        binder.unbind(consumerBinding);
        assertEquals(0, bindingsNow.size());
    }

    /**
     * Verifies producer property defaults (PERSISTENT delivery, two header
     * matcher strategies) and that overrides — prefix, delivery mode,
     * partitioning expressions — are applied to the outbound endpoint.
     */
    @Test
    public void testProducerProperties() throws Exception {
        Binder<MessageChannel> binder = getBinder();
        Binding<MessageChannel> producerBinding = binder.bindProducer("props.0", new DirectChannel(), null);
        @SuppressWarnings("unchecked")
        List<Binding<MessageChannel>> bindings = TestUtils.getPropertyValue(binder, "binder.bindings", List.class);
        assertEquals(1, bindings.size());
        AbstractEndpoint endpoint = bindings.get(0).getEndpoint();
        MessageDeliveryMode mode = TestUtils.getPropertyValue(endpoint, "handler.delegate.defaultDeliveryMode",
                MessageDeliveryMode.class);
        assertEquals(MessageDeliveryMode.PERSISTENT, mode);
        List<?> requestHeaders = TestUtils.getPropertyValue(endpoint,
                "handler.delegate.headerMapper.requestHeaderMatcher.strategies", List.class);
        assertEquals(2, requestHeaders.size());
        binder.unbind(producerBinding);
        assertEquals(0, bindings.size());
        Properties properties = new Properties();
        properties.put("prefix", "foo.");
        properties.put("deliveryMode", "NON_PERSISTENT");
        properties.put("requestHeaderPatterns", "foo");
        properties.put("partitionKeyExpression", "'foo'");
        properties.put("partitionKeyExtractorClass", "foo");
        properties.put("partitionSelectorExpression", "0");
        properties.put("partitionSelectorClass", "foo");
        properties.put(BinderPropertyKeys.NEXT_MODULE_COUNT, "1");
        producerBinding = binder.bindProducer("props.0", new DirectChannel(), properties);
        assertEquals(1, bindings.size());
        endpoint = bindings.get(0).getEndpoint();
        // Partitioned producers route by destination + partition header.
        assertEquals(
                "'props.0-' + headers['partition']",
                TestUtils.getPropertyValue(endpoint, "handler.delegate.routingKeyExpression",
                        SpelExpression.class).getExpressionString());
        mode = TestUtils.getPropertyValue(endpoint, "handler.delegate.defaultDeliveryMode",
                MessageDeliveryMode.class);
        assertEquals(MessageDeliveryMode.NON_PERSISTENT, mode);
        verifyFooRequestProducer(endpoint);
        binder.unbind(producerBinding);
        assertEquals(0, bindings.size());
    }

    /**
     * With durableSubscription=true and autoBindDLQ=true, a failed message ends
     * up in the group DLQ, and the DLQ survives unbinding (durable).
     */
    @Test
    public void testDurablePubSubWithAutoBindDLQ() throws Exception {
        RabbitAdmin admin = new RabbitAdmin(this.rabbitAvailableRule.getResource());

        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("prefix", TEST_PREFIX);
        properties.put("autoBindDLQ", "true");
        properties.put("durableSubscription", "true");
        properties.put("maxAttempts", "1"); // disable retry
        properties.put("requeue", "false");
        DirectChannel moduleInputChannel = new DirectChannel();
        moduleInputChannel.setBeanName("durableTest");
        moduleInputChannel.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                // Always fail so the message is dead-lettered.
                throw new RuntimeException("foo");
            }

        });
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("durabletest.0", "tgroup", moduleInputChannel, properties);

        RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        template.convertAndSend(TEST_PREFIX + "durabletest.0", "", "foo");

        // Poll the DLQ for up to ~10s for the dead-lettered payload.
        int n = 0;
        while (n++ < 100) {
            Object deadLetter = template.receiveAndConvert(TEST_PREFIX + "durabletest.0.tgroup.dlq");
            if (deadLetter != null) {
                assertEquals("foo", deadLetter);
                break;
            }
            Thread.sleep(100);
        }
        assertTrue(n < 100);

        binder.unbind(consumerBinding);
        // The durable DLQ must still exist after unbind.
        assertNotNull(admin.getQueueProperties(TEST_PREFIX + "durabletest.0.tgroup.dlq"));
    }

    /**
     * With durableSubscription=false the DLQ does not survive unbinding.
     */
    @Test
    public void testNonDurablePubSubWithAutoBindDLQ() throws Exception {
        RabbitAdmin admin = new RabbitAdmin(this.rabbitAvailableRule.getResource());

        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("prefix", TEST_PREFIX);
        properties.put("autoBindDLQ", "true");
        properties.put("durableSubscription", "false");
        properties.put("maxAttempts", "1"); // disable retry
        properties.put("requeue", "false");
        DirectChannel moduleInputChannel = new DirectChannel();
        moduleInputChannel.setBeanName("nondurabletest");
        moduleInputChannel.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                throw new RuntimeException("foo");
            }

        });
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("nondurabletest.0", "tgroup", moduleInputChannel, properties);

        binder.unbind(consumerBinding);
        // NOTE(review): this checks "nondurabletest.0.dlq" without the group
        // segment, unlike the durable test above — confirm the expected DLQ
        // naming for non-durable subscriptions.
        assertNull(admin.getQueueProperties(TEST_PREFIX + "nondurabletest.0.dlq"));
    }

    /**
     * autoBindDLQ routes a failed message to the DLQ, and unbinding removes
     * the auto-declared queue/binding beans from the binder's context.
     */
    @Test
    public void testAutoBindDLQ() throws Exception {
        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("prefix", TEST_PREFIX);
        properties.put("autoBindDLQ", "true");
        properties.put("maxAttempts", "1"); // disable retry
        properties.put("requeue", "false");
        properties.put("durableSubscription","true");
        DirectChannel moduleInputChannel = new DirectChannel();
        moduleInputChannel.setBeanName("dlqTest");
        moduleInputChannel.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                throw new RuntimeException("foo");
            }

        });
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("dlqtest", "default", moduleInputChannel, properties);

        RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        template.convertAndSend("", TEST_PREFIX + "dlqtest.default", "foo");

        // Poll the DLQ for up to ~10s.
        int n = 0;
        while (n++ < 100) {
            Object deadLetter = template.receiveAndConvert(TEST_PREFIX + "dlqtest.default.dlq");
            if (deadLetter != null) {
                assertEquals("foo", deadLetter);
                break;
            }
            Thread.sleep(100);
        }
        assertTrue(n < 100);

        binder.unbind(consumerBinding);
        // After unbind, the auto-declare context must no longer hold the
        // queue/binding beans for either the main queue or its DLQ.
        ApplicationContext context = TestUtils.getPropertyValue(binder, "binder.autoDeclareContext",
                ApplicationContext.class);
        assertFalse(context.containsBean(TEST_PREFIX + "dlqtest.default.binding"));
        assertFalse(context.containsBean(TEST_PREFIX + "dlqtest.default"));
        assertFalse(context.containsBean(TEST_PREFIX + "dlqtest.default.dlq.binding"));
        assertFalse(context.containsBean(TEST_PREFIX + "dlqtest.default.dlq"));
    }

    /**
     * Partitioned destination with autoBindDLQ, consumers bound before the
     * producer: dead letters retain the "partition" header for each partition.
     */
    @Test
    public void testAutoBindDLQPartionedConsumerFirst() throws Exception {
        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("prefix", "bindertest.");
        properties.put("autoBindDLQ", "true");
        properties.put("maxAttempts", "1"); // disable retry
        properties.put("requeue", "false");
        properties.put("partitionIndex", "0");
        properties.put("durableSubscription","true");
        DirectChannel input0 = new DirectChannel();
        input0.setBeanName("test.input0DLQ");
        Binding<MessageChannel> input0Binding = binder.bindConsumer("partDLQ.0", "dlqPartGrp", input0, properties);
        Binding<MessageChannel> defaultConsumerBinding1 =
                binder.bindConsumer("partDLQ.0", "default", new QueueChannel(), properties);
        properties.put("partitionIndex", "1");
        DirectChannel input1 = new DirectChannel();
        input1.setBeanName("test.input1DLQ");
        Binding<MessageChannel> input1Binding = binder.bindConsumer("partDLQ.0", "dlqPartGrp", input1, properties);
        Binding<MessageChannel> defaultConsumerBinding2 = binder.bindConsumer("partDLQ.0", "default", new QueueChannel(), properties);

        // Producer bound after consumers, partitioned across 2 partitions.
        properties.clear();
        properties.put("prefix", "bindertest.");
        properties.put("autoBindDLQ", "true");
        properties.put("partitionKeyExtractorClass", "org.springframework.cloud.stream.binder.PartitionTestSupport");
        properties.put("partitionSelectorClass", "org.springframework.cloud.stream.binder.PartitionTestSupport");
        properties.put(BinderPropertyKeys.NEXT_MODULE_COUNT, "2");
        DirectChannel output = new DirectChannel();
        output.setBeanName("test.output");
        Binding<MessageChannel> outputBinding = binder.bindProducer("partDLQ.0", output, properties);

        // Each handler accepts exactly one message, then fails to dead-letter
        // subsequent deliveries.
        final CountDownLatch latch0 = new CountDownLatch(1);
        input0.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                if (latch0.getCount() <= 0) {
                    throw new RuntimeException("dlq");
                }
                latch0.countDown();
            }

        });
        final CountDownLatch latch1 = new CountDownLatch(1);
        input1.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                if (latch1.getCount() <= 0) {
                    throw new RuntimeException("dlq");
                }
                latch1.countDown();
            }

        });
        // Payload 1 routes to partition 1, payload 0 to partition 0.
        output.send(new GenericMessage<Integer>(1));
        assertTrue(latch1.await(10, TimeUnit.SECONDS));
        output.send(new GenericMessage<Integer>(0));
        assertTrue(latch0.await(10, TimeUnit.SECONDS));

        output.send(new GenericMessage<Integer>(1));
        RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        template.setReceiveTimeout(10000);
        String streamDLQName = "bindertest.partDLQ.0.dlqPartGrp.dlq";
        org.springframework.amqp.core.Message received = template.receive(streamDLQName);
        assertNotNull(received);
        assertEquals(1, received.getMessageProperties().getHeaders().get("partition"));

        output.send(new GenericMessage<Integer>(0));
        received = template.receive(streamDLQName);
        assertNotNull(received);
        assertEquals(0, received.getMessageProperties().getHeaders().get("partition"));

        binder.unbind(input0Binding);
        binder.unbind(input1Binding);
        binder.unbind(defaultConsumerBinding1);
        binder.unbind(defaultConsumerBinding2);
        binder.unbind(outputBinding);
    }

    /**
     * Same scenario as the consumer-first test, but the producer is bound
     * before the consumers.
     */
    @Test
    public void testAutoBindDLQPartionedProducerFirst() throws Exception {
        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("prefix", "bindertest.");
        properties.put("autoBindDLQ", "true");
        properties.put("partitionKeyExtractorClass", "org.springframework.cloud.stream.binder.PartitionTestSupport");
        properties.put("partitionSelectorClass", "org.springframework.cloud.stream.binder.PartitionTestSupport");
        properties.put(BinderPropertyKeys.NEXT_MODULE_COUNT, "2");
        DirectChannel output = new DirectChannel();
        output.setBeanName("test.output");
        Binding<MessageChannel> outputBinding = binder.bindProducer("partDLQ.1", output, properties);

        properties.clear();
        properties.put("prefix", "bindertest.");
        properties.put("autoBindDLQ", "true");
        properties.put("maxAttempts", "1"); // disable retry
        properties.put("requeue", "false");
        properties.put("partitionIndex", "0");
        properties.put(BinderPropertyKeys.DURABLE,"true");
        DirectChannel input0 = new DirectChannel();
        input0.setBeanName("test.input0DLQ");
        Binding<MessageChannel> input0Binding = binder.bindConsumer("partDLQ.1", "dlqPartGrp", input0, properties);
        Binding<MessageChannel> defaultConsumerBinding1 = binder.bindConsumer("partDLQ.1", "defaultConsumer", new QueueChannel(), properties);
        properties.put("partitionIndex", "1");
        DirectChannel input1 = new DirectChannel();
        input1.setBeanName("test.input1DLQ");
        Binding<MessageChannel> input1Binding = binder.bindConsumer("partDLQ.1", "dlqPartGrp", input1, properties);
        Binding<MessageChannel> defaultConsumerBinding2 = binder.bindConsumer("partDLQ.1", "defaultConsumer", new QueueChannel(), properties);

        // First delivery to each partition succeeds; later ones are dead-lettered.
        final CountDownLatch latch0 = new CountDownLatch(1);
        input0.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                if (latch0.getCount() <= 0) {
                    throw new RuntimeException("dlq");
                }
                latch0.countDown();
            }

        });
        final CountDownLatch latch1 = new CountDownLatch(1);
        input1.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                if (latch1.getCount() <= 0) {
                    throw new RuntimeException("dlq");
                }
                latch1.countDown();
            }

        });
        output.send(new GenericMessage<Integer>(1));
        assertTrue(latch1.await(10, TimeUnit.SECONDS));
        output.send(new GenericMessage<Integer>(0));
        assertTrue(latch0.await(10, TimeUnit.SECONDS));

        output.send(new GenericMessage<Integer>(1));
        RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        template.setReceiveTimeout(10000);
        String streamDLQName = "bindertest.partDLQ.1.dlqPartGrp.dlq";
        org.springframework.amqp.core.Message received = template.receive(streamDLQName);
        assertNotNull(received);
        assertEquals(1, received.getMessageProperties().getHeaders().get("partition"));

        output.send(new GenericMessage<Integer>(0));
        received = template.receive(streamDLQName);
        assertNotNull(received);
        assertEquals(0, received.getMessageProperties().getHeaders().get("partition"));

        binder.unbind(input0Binding);
        binder.unbind(input1Binding);
        binder.unbind(defaultConsumerBinding1);
        binder.unbind(defaultConsumerBinding2);
        binder.unbind(outputBinding);
    }

    /**
     * With republishToDLQ=true the failed message is republished to the DLQ
     * with an x-exception-stacktrace header (rather than broker dead-lettering).
     */
    @Test
    public void testAutoBindDLQwithRepublish() throws Exception {
        // pre-declare the queue with dead-lettering, users can also use a policy
        RabbitAdmin admin = new RabbitAdmin(this.rabbitAvailableRule.getResource());
        Map<String, Object> args = new HashMap<String, Object>();
        args.put("x-dead-letter-exchange", TEST_PREFIX + "DLX");
        args.put("x-dead-letter-routing-key", TEST_PREFIX + "dlqpubtest.default");
        Queue queue = new Queue(TEST_PREFIX + "dlqpubtest.default", true, false, false, args);
        admin.declareQueue(queue);

        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("prefix", TEST_PREFIX);
        properties.put("autoBindDLQ", "true");
        properties.put("republishToDLQ", "true");
        properties.put("maxAttempts", "1"); // disable retry
        properties.put("requeue", "false");
        properties.put("durableSubscription", "true");
        DirectChannel moduleInputChannel = new DirectChannel();
        moduleInputChannel.setBeanName("dlqPubTest");
        moduleInputChannel.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                throw new RuntimeException("foo");
            }

        });
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("dlqpubtest", "default", moduleInputChannel, properties);

        RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        template.convertAndSend("", TEST_PREFIX + "dlqpubtest.default", "foo");

        // Poll the DLQ for up to ~10s; republished dead letters carry the
        // failure stack trace in a message header.
        int n = 0;
        while (n++ < 100) {
            org.springframework.amqp.core.Message deadLetter = template.receive(TEST_PREFIX + "dlqpubtest.default.dlq");
            if (deadLetter != null) {
                assertEquals("foo", new String(deadLetter.getBody()));
                assertNotNull(deadLetter.getMessageProperties().getHeaders().get("x-exception-stacktrace"));
                break;
            }
            Thread.sleep(100);
        }
        assertTrue(n < 100);

        binder.unbind(consumerBinding);
    }

    /**
     * Producer-side batching (2 messages per batch) plus GZIP-style
     * compression; verifies the raw batched/compressed wire format and that
     * the consumer transparently de-batches and decompresses.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testBatchingAndCompression() throws Exception {
        RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        Binder<MessageChannel> binder = getBinder();
        Properties properties = new Properties();
        properties.put("deliveryMode", "NON_PERSISTENT");
        properties.put("batchingEnabled", "true");
        properties.put("batchSize", "2");
        properties.put("batchBufferLimit", "100000");
        properties.put("batchTimeout", "30000");
        properties.put("compress", "true");
        DirectChannel output = new DirectChannel();
        output.setBeanName("batchingProducer");
        Binding<MessageChannel> producerBinding = binder.bindProducer("batching.0", output, properties);

        // Drain any stale messages left over from a previous run.
        while (template.receive(RabbitMessageChannelBinder.DEFAULT_RABBIT_PREFIX + "batching.0.default") != null) {
        }

        // Spy on the compressor's logger to assert that compression actually ran.
        Log logger = spy(TestUtils.getPropertyValue(binder, "binder.compressingPostProcessor.logger", Log.class));
        new DirectFieldAccessor(TestUtils.getPropertyValue(binder, "binder.compressingPostProcessor"))
                .setPropertyValue("logger", logger);
        when(logger.isTraceEnabled()).thenReturn(true);
        assertEquals(Deflater.BEST_SPEED, TestUtils.getPropertyValue(binder, "binder.compressingPostProcessor.level"));

        output.send(new GenericMessage<>("foo".getBytes()));
        output.send(new GenericMessage<>("bar".getBytes()));

        // Raw wire format: each payload is preceded by its 4-byte length.
        Object out = spyOn("batching.0.default").receive(false);
        assertThat(out, instanceOf(byte[].class));
        assertEquals("\u0000\u0000\u0000\u0003foo\u0000\u0000\u0000\u0003bar", new String((byte[]) out));

        ArgumentCaptor<Object> captor = ArgumentCaptor.forClass(Object.class);
        verify(logger).trace(captor.capture());
        assertThat(captor.getValue().toString(), containsString("Compressed 14 to "));

        QueueChannel input = new QueueChannel();
        input.setBeanName("batchingConsumer");
        Binding<MessageChannel> consumerBinding = binder.bindConsumer("batching.0", "test", input, null);

        output.send(new GenericMessage<>("foo".getBytes()));
        output.send(new GenericMessage<>("bar".getBytes()));

        // Consumer receives the two original payloads individually.
        Message<byte[]> in = (Message<byte[]>) input.receive(10000);
        assertNotNull(in);
        assertEquals("foo", new String(in.getPayload()));
        in = (Message<byte[]>) input.receive(10000);
        assertNotNull(in);
        assertEquals("bar", new String(in.getPayload()));
        assertNull(in.getHeaders().get(AmqpHeaders.DELIVERY_MODE));

        binder.unbind(producerBinding);
        binder.unbind(consumerBinding);
    }

    /*
     * Test late binding due to broker down; queues with and without DLQs, and
     * partitioned queues.
     */
    @Test
    public void testLateBinding() throws Exception {
        // Proxy stays down while the bindings are created; it is started only
        // after all bindings exist, so all declarations happen "late".
        RabbitTestSupport.RabbitProxy proxy = new RabbitTestSupport.RabbitProxy();
        CachingConnectionFactory cf = new CachingConnectionFactory("localhost", proxy.getPort());
        RabbitMessageChannelBinder rabbitBinder = new RabbitMessageChannelBinder(cf);
        rabbitBinder.setDefaultAutoBindDLQ(true);
        AbstractTestBinder<RabbitMessageChannelBinder> binder = new RabbitTestBinder(cf, rabbitBinder);

        Properties properties = new Properties();
        properties.put("prefix", "latebinder.");

        MessageChannel moduleOutputChannel = new DirectChannel();
        Binding<MessageChannel> late0ProducerBinding = binder.bindProducer("late.0", moduleOutputChannel, properties);

        QueueChannel moduleInputChannel = new QueueChannel();
        Binding<MessageChannel> late0ConsumerBinding = binder.bindConsumer("late.0", "test", moduleInputChannel, properties);

        // Partitioned producer/consumers (2 partitions).
        properties.put("partitionKeyExpression", "payload.equals('0') ? 0 : 1");
        properties.put("partitionSelectorExpression", "hashCode()");
        properties.put("nextModuleCount", "2");

        MessageChannel partOutputChannel = new DirectChannel();
        Binding<MessageChannel> partlate0ProducerBinding = binder.bindProducer("partlate.0", partOutputChannel, properties);

        QueueChannel partInputChannel0 = new QueueChannel();
        QueueChannel partInputChannel1 = new QueueChannel();
        properties.clear();
        properties.put("prefix", "latebinder.");
        properties.put("partitionIndex", "0");
        Binding<MessageChannel> partlate0Consumer0Binding = binder.bindConsumer("partlate.0", "test", partInputChannel0, properties);
        properties.put("partitionIndex", "1");
        Binding<MessageChannel> partlate0Consumer1Binding = binder.bindConsumer("partlate.0", "test", partInputChannel1, properties);

        // Bindings without a DLQ.
        rabbitBinder.setDefaultAutoBindDLQ(false);
        properties.clear();
        properties.put("prefix", "latebinder.");
        MessageChannel noDLQOutputChannel = new DirectChannel();
        Binding<MessageChannel> noDlqProducerBinding = binder.bindProducer("lateNoDLQ.0", noDLQOutputChannel, properties);

        QueueChannel noDLQInputChannel = new QueueChannel();
        Binding<MessageChannel> noDlqConsumerBinding = binder.bindConsumer("lateNoDLQ.0", "test", noDLQInputChannel, properties);

        // Pub/sub with a non-durable and a durable group subscription.
        MessageChannel outputChannel = new DirectChannel();
        Binding<MessageChannel> pubSubProducerBinding = binder.bindProducer("latePubSub", outputChannel, properties);
        QueueChannel pubSubInputChannel = new QueueChannel();
        Binding<MessageChannel> nonDurableConsumerBinding = binder.bindConsumer("latePubSub", "lategroup", pubSubInputChannel, properties);
        QueueChannel durablePubSubInputChannel = new QueueChannel();
        properties.setProperty("durableSubscription", "true");
        Binding<MessageChannel> durableConsumerBinding = binder.bindConsumer("latePubSub", "lateDurableGroup", durablePubSubInputChannel, properties);

        // Broker becomes reachable only now; all queues/exchanges must be
        // declared lazily and the messages must flow.
        proxy.start();

        moduleOutputChannel.send(new GenericMessage<>("foo"));
        Message<?> message = moduleInputChannel.receive(10000);
        assertNotNull(message);
        assertEquals("foo", message.getPayload());

        noDLQOutputChannel.send(new GenericMessage<>("bar"));
        message = noDLQInputChannel.receive(10000);
        assertNotNull(message);
        assertEquals("bar", message.getPayload());

        // Both the non-durable and durable subscriptions receive the broadcast.
        outputChannel.send(new GenericMessage<>("baz"));
        message = pubSubInputChannel.receive(10000);
        assertNotNull(message);
        assertEquals("baz", message.getPayload());
        message = durablePubSubInputChannel.receive(10000);
        assertNotNull(message);
        assertEquals("baz", message.getPayload());

        // Partition routing: "0" -> partition 0, anything else -> partition 1.
        partOutputChannel.send(new GenericMessage<>("0"));
        partOutputChannel.send(new GenericMessage<>("1"));
        message = partInputChannel0.receive(10000);
        assertNotNull(message);
        assertEquals("0", message.getPayload());
        message = partInputChannel1.receive(10000);
        assertNotNull(message);
        assertEquals("1", message.getPayload());

        binder.unbind(late0ProducerBinding);
        binder.unbind(late0ConsumerBinding);
        binder.unbind(partlate0ProducerBinding);
        binder.unbind(partlate0Consumer0Binding);
        binder.unbind(partlate0Consumer1Binding);
        binder.unbind(noDlqProducerBinding);
        binder.unbind(noDlqConsumerBinding);
        binder.unbind(pubSubProducerBinding);
        binder.unbind(nonDurableConsumerBinding);
        binder.unbind(durableConsumerBinding);
        binder.cleanup();

        proxy.stop();
        cf.destroy();

        this.rabbitAvailableRule.getResource().destroy();
    }

    /**
     * Asserts that every overridden consumer property from
     * testConsumerProperties() reached the listener container; returns the
     * container for further queue-name checks.
     */
    private SimpleMessageListenerContainer verifyContainer(AbstractEndpoint endpoint) {
        SimpleMessageListenerContainer container;
        Advice retry;
        container = TestUtils.getPropertyValue(endpoint, "messageListenerContainer",
                SimpleMessageListenerContainer.class);
        assertEquals(AcknowledgeMode.NONE, container.getAcknowledgeMode());
        assertThat(container.getQueueNames()[0], startsWith("foo.props.0"));
        assertFalse(TestUtils.getPropertyValue(container, "transactional", Boolean.class));
        assertEquals(2, TestUtils.getPropertyValue(container, "concurrentConsumers"));
        assertEquals(3, TestUtils.getPropertyValue(container, "maxConcurrentConsumers"));
        assertFalse(TestUtils.getPropertyValue(container, "defaultRequeueRejected", Boolean.class));
        assertEquals(20, TestUtils.getPropertyValue(container, "prefetchCount"));
        assertEquals(10, TestUtils.getPropertyValue(container, "txSize"));
        retry = TestUtils.getPropertyValue(container, "adviceChain", Advice[].class)[0];
        assertEquals(23, TestUtils.getPropertyValue(retry, "retryOperations.retryPolicy.maxAttempts"));
        assertEquals(2000L, TestUtils.getPropertyValue(retry, "retryOperations.backOffPolicy.initialInterval"));
        assertEquals(20000L, TestUtils.getPropertyValue(retry, "retryOperations.backOffPolicy.maxInterval"));
        assertEquals(5.0, TestUtils.getPropertyValue(retry, "retryOperations.backOffPolicy.multiplier"));
        // requestHeaderPatterns=foo reduces the matcher to a single "foo" pattern.
        List<?> requestMatchers = TestUtils.getPropertyValue(endpoint,
                "headerMapper.requestHeaderMatcher.strategies",
                List.class);
        assertEquals(1, requestMatchers.size());
        assertEquals("foo",
                TestUtils.getPropertyValue(requestMatchers.get(0), "patterns", Collection.class).iterator().next());
        return container;
    }

    /**
     * Asserts that the producer endpoint's request-header matcher was reduced
     * to the single "foo" pattern.
     */
    private void verifyFooRequestProducer(AbstractEndpoint endpoint) {
        List<?> requestMatchers = TestUtils.getPropertyValue(endpoint,
                "handler.delegate.headerMapper.requestHeaderMatcher.strategies",
                List.class);
        assertEquals(1, requestMatchers.size());
        assertEquals("foo",
                TestUtils.getPropertyValue(requestMatchers.get(0), "patterns", Collection.class).iterator().next());
    }

    @Override
    protected String getEndpointRouting(AbstractEndpoint endpoint) {
        return TestUtils.getPropertyValue(endpoint, "handler.delegate.routingKeyExpression",
                SpelExpression.class).getExpressionString();
    }

    @Override
    protected String getExpectedRoutingBaseDestination(String name, String group) {
        return name;
    }

    @Override
    protected String getPubSubEndpointRouting(AbstractEndpoint endpoint) {
        return TestUtils.getPropertyValue(endpoint, "handler.delegate.exchangeNameExpression",
                SpelExpression.class).getExpressionString();
    }

    @Override
    protected String getClassUnderTestName() {
        return CLASS_UNDER_TEST_NAME;
    }

    @Override
    public Spy spyOn(final String queue) {
        final RabbitTemplate template = new RabbitTemplate(this.rabbitAvailableRule.getResource());
        // Transparently decompress payloads that the binder compressed.
        template.setAfterReceivePostProcessors(new DelegatingDecompressingPostProcessor());
        return new Spy() {

            @Override
            public Object receive(boolean expectNull) throws Exception {
                if (expectNull) {
                    // Give any in-flight message a moment to arrive, then do a
                    // single (expected-empty) receive.
                    Thread.sleep(50);
                    return template.receiveAndConvert(RabbitMessageChannelBinder.DEFAULT_RABBIT_PREFIX + queue);
                }
                // Poll for up to ~10s for the expected message.
                Object bar = null;
                int n = 0;
                while (n++ < 100 && bar == null) {
                    bar = template.receiveAndConvert(RabbitMessageChannelBinder.DEFAULT_RABBIT_PREFIX + queue);
                    Thread.sleep(100);
                }
                assertTrue("Message did not arrive in RabbitMQ", n < 100);
                return bar;
            }

        };
    }

}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.avatica.jdbc;
import org.apache.calcite.avatica.AvaticaParameter;
import org.apache.calcite.avatica.AvaticaPreparedStatement;
import org.apache.calcite.avatica.AvaticaUtils;
import org.apache.calcite.avatica.ColumnMetaData;
import org.apache.calcite.avatica.ConnectionPropertiesImpl;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.avatica.MetaImpl;
import org.apache.calcite.avatica.MissingResultsException;
import org.apache.calcite.avatica.NoSuchConnectionException;
import org.apache.calcite.avatica.NoSuchStatementException;
import org.apache.calcite.avatica.QueryState;
import org.apache.calcite.avatica.SqlType;
import org.apache.calcite.avatica.metrics.Gauge;
import org.apache.calcite.avatica.metrics.MetricsSystem;
import org.apache.calcite.avatica.metrics.noop.NoopMetricsSystem;
import org.apache.calcite.avatica.remote.TypedValue;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.calcite.avatica.remote.MetricsHelper.concat;
import java.lang.reflect.InvocationTargetException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/** Implementation of {@link Meta} upon an existing JDBC data source. */
public class JdbcMeta implements Meta {
  private static final Logger LOG = LoggerFactory.getLogger(JdbcMeta.class);

  // Base names used when registering cache metrics for this instance.
  private static final String CONN_CACHE_KEY_BASE = "avatica.connectioncache";

  private static final String STMT_CACHE_KEY_BASE = "avatica.statementcache";

  /** Special value for {@code Statement#getLargeMaxRows()} that means fetch
   * an unlimited number of rows in a single batch.
   *
   * <p>Any other negative value will return an unlimited number of rows but
   * will do it in the default batch size, namely 100. */
  public static final long UNLIMITED_COUNT = -2L;

  // End of constants, start of member variables

  // NOTE(review): Calendar is not thread-safe — confirm this instance is only
  // used under appropriate synchronization.
  final Calendar calendar = Calendar.getInstance();

  /** Generates ids for statements. The ids are unique across all connections
   * created by this JdbcMeta. */
  private final AtomicInteger statementIdGenerator = new AtomicInteger();

  // JDBC url and connection properties used to open backing connections.
  private final String url;
  private final Properties info;
  // Caches of open JDBC connections (by connection id) and statements
  // (by statement id), with eviction configured from `info`.
  private final Cache<String, Connection> connectionCache;
  private final Cache<Integer, StatementInfo> statementCache;
  // Metrics sink; NoopMetricsSystem when none is supplied.
  private final MetricsSystem metrics;
/**
 * Creates a JdbcMeta.
 *
 * @param url a database url of the form
 * <code>jdbc:<em>subprotocol</em>:<em>subname</em></code>
 */
public JdbcMeta(String url) throws SQLException {
  // Delegates with empty connection properties; per-connection properties
  // may still arrive later via openConnection().
  this(url, new Properties());
}
/**
* Creates a JdbcMeta.
*
* @param url a database url of the form
* <code>jdbc:<em>subprotocol</em>:<em>subname</em></code>
* @param user the database user on whose behalf the connection is being
* made
* @param password the user's password
*/
public JdbcMeta(final String url, final String user, final String password)
throws SQLException {
this(url, new Properties() {
{
put("user", user);
put("password", password);
}
});
}
/** Creates a JdbcMeta with the given connection properties and no metrics. */
public JdbcMeta(String url, Properties info) throws SQLException {
  this(url, info, NoopMetricsSystem.getInstance());
}
/**
 * Creates a JdbcMeta.
 *
 * @param url a database url of the form
 * <code> jdbc:<em>subprotocol</em>:<em>subname</em></code>
 * @param info a list of arbitrary string tag/value pairs as
 * connection arguments; normally at least a "user" and
 * "password" property should be included
 * @param metrics metrics system to register cache-size gauges with; must
 * not be null (use {@code NoopMetricsSystem} to disable)
 */
public JdbcMeta(String url, Properties info, MetricsSystem metrics)
    throws SQLException {
  this.url = url;
  this.info = info;
  this.metrics = Objects.requireNonNull(metrics);

  // Connection-cache tuning: every knob is read from `info`, falling back to
  // the enum-declared default when the property is absent.
  int concurrencyLevel = Integer.parseInt(
      info.getProperty(ConnectionCacheSettings.CONCURRENCY_LEVEL.key(),
          ConnectionCacheSettings.CONCURRENCY_LEVEL.defaultValue()));
  int initialCapacity = Integer.parseInt(
      info.getProperty(ConnectionCacheSettings.INITIAL_CAPACITY.key(),
          ConnectionCacheSettings.INITIAL_CAPACITY.defaultValue()));
  long maxCapacity = Long.parseLong(
      info.getProperty(ConnectionCacheSettings.MAX_CAPACITY.key(),
          ConnectionCacheSettings.MAX_CAPACITY.defaultValue()));
  long connectionExpiryDuration = Long.parseLong(
      info.getProperty(ConnectionCacheSettings.EXPIRY_DURATION.key(),
          ConnectionCacheSettings.EXPIRY_DURATION.defaultValue()));
  TimeUnit connectionExpiryUnit = TimeUnit.valueOf(
      info.getProperty(ConnectionCacheSettings.EXPIRY_UNIT.key(),
          ConnectionCacheSettings.EXPIRY_UNIT.defaultValue()));
  // Expired or evicted connections are closed by ConnectionExpiryHandler.
  this.connectionCache = CacheBuilder.newBuilder()
      .concurrencyLevel(concurrencyLevel)
      .initialCapacity(initialCapacity)
      .maximumSize(maxCapacity)
      .expireAfterAccess(connectionExpiryDuration, connectionExpiryUnit)
      .removalListener(new ConnectionExpiryHandler())
      .build();
  LOG.debug("instantiated connection cache: {}", connectionCache.stats());

  // Statement-cache tuning: the same locals are reused for the statement
  // cache's (independently configured) settings.
  concurrencyLevel = Integer.parseInt(
      info.getProperty(StatementCacheSettings.CONCURRENCY_LEVEL.key(),
          StatementCacheSettings.CONCURRENCY_LEVEL.defaultValue()));
  initialCapacity = Integer.parseInt(
      info.getProperty(StatementCacheSettings.INITIAL_CAPACITY.key(),
          StatementCacheSettings.INITIAL_CAPACITY.defaultValue()));
  maxCapacity = Long.parseLong(
      info.getProperty(StatementCacheSettings.MAX_CAPACITY.key(),
          StatementCacheSettings.MAX_CAPACITY.defaultValue()));
  connectionExpiryDuration = Long.parseLong(
      info.getProperty(StatementCacheSettings.EXPIRY_DURATION.key(),
          StatementCacheSettings.EXPIRY_DURATION.defaultValue()));
  connectionExpiryUnit = TimeUnit.valueOf(
      info.getProperty(StatementCacheSettings.EXPIRY_UNIT.key(),
          StatementCacheSettings.EXPIRY_UNIT.defaultValue()));
  // Expired or evicted statements are closed by StatementExpiryHandler.
  this.statementCache = CacheBuilder.newBuilder()
      .concurrencyLevel(concurrencyLevel)
      .initialCapacity(initialCapacity)
      .maximumSize(maxCapacity)
      .expireAfterAccess(connectionExpiryDuration, connectionExpiryUnit)
      .removalListener(new StatementExpiryHandler())
      .build();
  LOG.debug("instantiated statement cache: {}", statementCache.stats());

  // Register some metrics: live gauges that report current cache sizes.
  this.metrics.register(concat(JdbcMeta.class, "ConnectionCacheSize"), new Gauge<Long>() {
    @Override public Long getValue() {
      return connectionCache.size();
    }
  });
  this.metrics.register(concat(JdbcMeta.class, "StatementCacheSize"), new Gauge<Long>() {
    @Override public Long getValue() {
      return statementCache.size();
    }
  });
}
/**
 * Converts from JDBC metadata to Avatica columns.
 *
 * <p>Returns an empty list when {@code metaData} is null; JDBC column
 * indices are 1-based while Avatica ordinals are 0-based.
 */
protected static List<ColumnMetaData>
    columns(ResultSetMetaData metaData) throws SQLException {
  if (metaData == null) {
    return Collections.emptyList();
  }
  final List<ColumnMetaData> columns = new ArrayList<>();
  for (int i = 1; i <= metaData.getColumnCount(); i++) {
    final SqlType sqlType = SqlType.valueOf(metaData.getColumnType(i));
    final ColumnMetaData.Rep rep = ColumnMetaData.Rep.of(sqlType.internal);
    final ColumnMetaData.AvaticaType t;
    if (sqlType == SqlType.ARRAY || sqlType == SqlType.STRUCT || sqlType == SqlType.MULTISET) {
      // Collection types: component values travel as generic objects, since
      // JDBC metadata does not expose the element type here.
      ColumnMetaData.AvaticaType arrayValueType = ColumnMetaData.scalar(Types.JAVA_OBJECT,
          metaData.getColumnTypeName(i), ColumnMetaData.Rep.OBJECT);
      t = ColumnMetaData.array(arrayValueType, metaData.getColumnTypeName(i), rep);
    } else {
      t = ColumnMetaData.scalar(metaData.getColumnType(i), metaData.getColumnTypeName(i), rep);
    }
    ColumnMetaData md =
        new ColumnMetaData(i - 1, metaData.isAutoIncrement(i),
            metaData.isCaseSensitive(i), metaData.isSearchable(i),
            metaData.isCurrency(i), metaData.isNullable(i),
            metaData.isSigned(i), metaData.getColumnDisplaySize(i),
            metaData.getColumnLabel(i), metaData.getColumnName(i),
            metaData.getSchemaName(i), metaData.getPrecision(i),
            metaData.getScale(i), metaData.getTableName(i),
            metaData.getCatalogName(i), t, metaData.isReadOnly(i),
            metaData.isWritable(i), metaData.isDefinitelyWritable(i),
            metaData.getColumnClassName(i));
    columns.add(md);
  }
  return columns;
}
/**
 * Converts from JDBC metadata to Avatica parameters.
 *
 * <p>Returns an empty list when {@code metaData} is null. Parameters are
 * given positional names of the form {@code "?1"}, {@code "?2"}, ...
 */
protected static List<AvaticaParameter> parameters(ParameterMetaData metaData)
    throws SQLException {
  if (metaData == null) {
    return Collections.emptyList();
  }
  final List<AvaticaParameter> params = new ArrayList<>();
  for (int i = 1; i <= metaData.getParameterCount(); i++) {
    params.add(
        new AvaticaParameter(metaData.isSigned(i), metaData.getPrecision(i),
            metaData.getScale(i), metaData.getParameterType(i),
            metaData.getParameterTypeName(i),
            metaData.getParameterClassName(i), "?" + i));
  }
  return params;
}
/**
 * Builds an Avatica signature from JDBC result-set and parameter metadata.
 * The LIST cursor factory is mandatory because {@code JdbcResultSet#frame}
 * materializes rows as lists.
 */
protected static Signature signature(ResultSetMetaData metaData,
    ParameterMetaData parameterMetaData, String sql,
    Meta.StatementType statementType) throws SQLException {
  final List<ColumnMetaData> columnList = columns(metaData);
  final List<AvaticaParameter> parameterList = parameters(parameterMetaData);
  return new Signature(columnList, sql, parameterList, null,
      CursorFactory.LIST, statementType);
}

/** Builds a signature with no SQL text, parameters or statement type. */
protected static Signature signature(ResultSetMetaData metaData)
    throws SQLException {
  return signature(metaData, null, null, null);
}
/**
 * Collects every {@link DatabaseProperty} value from the connection's
 * {@link DatabaseMetaData}, wrapping any SQLException as unchecked.
 */
public Map<DatabaseProperty, Object> getDatabaseProperties(ConnectionHandle ch) {
  try {
    final Map<DatabaseProperty, Object> map = new HashMap<>();
    final DatabaseMetaData metaData = getConnection(ch.id).getMetaData();
    for (DatabaseProperty p : DatabaseProperty.values()) {
      addProperty(map, metaData, p);
    }
    return map;
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Invokes the property's reflective DatabaseMetaData getter and stores the
 * result in {@code map}; returns the previous mapping, if any.
 */
private static Object addProperty(Map<DatabaseProperty, Object> map,
    DatabaseMetaData metaData, DatabaseProperty p) throws SQLException {
  try {
    return map.put(p, p.method.invoke(metaData));
  } catch (IllegalAccessException | InvocationTargetException e) {
    // Reflective access failures are programming errors, not SQL errors.
    throw new RuntimeException(e);
  }
}
/**
 * Delegates to {@link DatabaseMetaData#getTables}, registering an implicit
 * statement for the returned metadata result set.
 */
public MetaResultSet getTables(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat tableNamePattern, List<String> typeList) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getTables(catalog, schemaPattern.s,
            tableNamePattern.s, toArray(typeList));
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Registers a StatementInfo for the given ResultSet, returning the id under
 * which it is registered. This should be used for metadata ResultSets, which
 * have an implicit statement created.
 */
private int registerMetaStatement(ResultSet rs) throws SQLException {
  final int id = statementIdGenerator.getAndIncrement();
  StatementInfo statementInfo = new StatementInfo(rs.getStatement());
  // Pre-populate the result set so fetch() finds it without a syncResults().
  statementInfo.setResultSet(rs);
  statementCache.put(id, statementInfo);
  return id;
}
/** Delegates to {@link DatabaseMetaData#getColumns} via an implicit statement. */
public MetaResultSet getColumns(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat tableNamePattern, Pat columnNamePattern) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getColumns(catalog, schemaPattern.s,
            tableNamePattern.s, columnNamePattern.s);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getSchemas} via an implicit statement. */
public MetaResultSet getSchemas(ConnectionHandle ch, String catalog, Pat schemaPattern) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getSchemas(catalog, schemaPattern.s);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getCatalogs} via an implicit statement. */
public MetaResultSet getCatalogs(ConnectionHandle ch) {
  try {
    final ResultSet rs = getConnection(ch.id).getMetaData().getCatalogs();
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getTableTypes} via an implicit statement. */
public MetaResultSet getTableTypes(ConnectionHandle ch) {
  try {
    final ResultSet rs = getConnection(ch.id).getMetaData().getTableTypes();
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getProcedures} via an implicit statement. */
public MetaResultSet getProcedures(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat procedureNamePattern) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getProcedures(catalog, schemaPattern.s,
            procedureNamePattern.s);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getProcedureColumns} via an implicit statement. */
public MetaResultSet getProcedureColumns(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat procedureNamePattern, Pat columnNamePattern) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getProcedureColumns(catalog,
            schemaPattern.s, procedureNamePattern.s, columnNamePattern.s);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}
/** Delegates to {@link DatabaseMetaData#getColumnPrivileges} via an implicit statement. */
public MetaResultSet getColumnPrivileges(ConnectionHandle ch, String catalog, String schema,
    String table, Pat columnNamePattern) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getColumnPrivileges(catalog, schema,
            table, columnNamePattern.s);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getTablePrivileges} via an implicit statement. */
public MetaResultSet getTablePrivileges(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat tableNamePattern) {
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getTablePrivileges(catalog,
            schemaPattern.s, tableNamePattern.s);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getBestRowIdentifier} via an implicit statement. */
public MetaResultSet getBestRowIdentifier(ConnectionHandle ch, String catalog, String schema,
    String table, int scope, boolean nullable) {
  LOG.trace("getBestRowIdentifier catalog:{} schema:{} table:{} scope:{} nullable:{}", catalog,
      schema, table, scope, nullable);
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getBestRowIdentifier(catalog, schema,
            table, scope, nullable);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getVersionColumns} via an implicit statement. */
public MetaResultSet getVersionColumns(ConnectionHandle ch, String catalog, String schema,
    String table) {
  LOG.trace("getVersionColumns catalog:{} schema:{} table:{}", catalog, schema, table);
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getVersionColumns(catalog, schema, table);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Delegates to {@link DatabaseMetaData#getPrimaryKeys} via an implicit statement. */
public MetaResultSet getPrimaryKeys(ConnectionHandle ch, String catalog, String schema,
    String table) {
  LOG.trace("getPrimaryKeys catalog:{} schema:{} table:{}", catalog, schema, table);
  try {
    final ResultSet rs =
        getConnection(ch.id).getMetaData().getPrimaryKeys(catalog, schema, table);
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}
// The metadata operations below (except getTypeInfo) are unimplemented and
// return null, meaning "no result". NOTE(review): presumably callers treat a
// null MetaResultSet as absent — confirm against the Meta contract before
// relying on it.

/** Not implemented; returns null (no result). */
public MetaResultSet getImportedKeys(ConnectionHandle ch, String catalog, String schema,
    String table) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getExportedKeys(ConnectionHandle ch, String catalog, String schema,
    String table) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getCrossReference(ConnectionHandle ch, String parentCatalog,
    String parentSchema, String parentTable, String foreignCatalog,
    String foreignSchema, String foreignTable) {
  return null;
}

/** Delegates to {@link DatabaseMetaData#getTypeInfo} via an implicit statement. */
public MetaResultSet getTypeInfo(ConnectionHandle ch) {
  try {
    final ResultSet rs = getConnection(ch.id).getMetaData().getTypeInfo();
    int stmtId = registerMetaStatement(rs);
    return JdbcResultSet.create(ch.id, stmtId, rs);
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
}

/** Not implemented; returns null (no result). */
public MetaResultSet getIndexInfo(ConnectionHandle ch, String catalog, String schema,
    String table, boolean unique, boolean approximate) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getUDTs(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat typeNamePattern, int[] types) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getSuperTypes(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat typeNamePattern) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getSuperTables(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat tableNamePattern) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getAttributes(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat typeNamePattern, Pat attributeNamePattern) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getClientInfoProperties(ConnectionHandle ch) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getFunctions(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat functionNamePattern) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getFunctionColumns(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat functionNamePattern, Pat columnNamePattern) {
  return null;
}

/** Not implemented; returns null (no result). */
public MetaResultSet getPseudoColumns(ConnectionHandle ch, String catalog, Pat schemaPattern,
    Pat tableNamePattern, Pat columnNamePattern) {
  return null;
}

/** Not implemented; returns null (no iterable). */
public Iterable<Object> createIterable(StatementHandle handle, QueryState state,
    Signature signature, List<TypedValue> parameterValues, Frame firstFrame) {
  return null;
}
/**
 * Looks up the JDBC connection registered under {@code id}.
 *
 * @param id connection id assigned by the client; must not be null
 * @return the cached connection, never null
 * @throws NoSuchConnectionException if the id is unknown, closed or expired
 */
protected Connection getConnection(String id) throws SQLException {
  // Objects.requireNonNull throws the same NullPointerException, with the
  // same message, as the previous hand-rolled null check.
  Objects.requireNonNull(id, "Connection id is null.");
  Connection conn = connectionCache.getIfPresent(id);
  if (conn == null) {
    throw new NoSuchConnectionException("Connection not found: invalid id, closed, or expired: "
        + id);
  }
  return conn;
}
/**
 * Creates a plain JDBC statement on the client's connection, caches it under
 * a freshly generated id, and returns its handle (no signature yet).
 */
public StatementHandle createStatement(ConnectionHandle ch) {
  try {
    final Connection conn = getConnection(ch.id);
    final Statement statement = conn.createStatement();
    final int id = statementIdGenerator.getAndIncrement();
    statementCache.put(id, new StatementInfo(statement));
    StatementHandle h = new StatementHandle(ch.id, id, null);
    LOG.trace("created statement {}", h);
    return h;
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/**
 * Closes the statement's result set (if initialized) and the statement
 * itself, then evicts it from the cache. Unknown ids are ignored; closing is
 * idempotent from the client's perspective.
 */
@Override public void closeStatement(StatementHandle h) {
  StatementInfo info = statementCache.getIfPresent(h.id);
  if (info == null || info.statement == null) {
    LOG.debug("client requested close unknown statement {}", h);
    return;
  }
  LOG.trace("closing statement {}", h);
  try {
    ResultSet results = info.getResultSet();
    if (info.isResultSetInitialized() && null != results) {
      results.close();
    }
    info.statement.close();
  } catch (SQLException e) {
    throw propagate(e);
  } finally {
    // Evict even when close() failed, so the broken statement is not reused.
    statementCache.invalidate(h.id);
  }
}
/**
 * Opens a new JDBC connection for the handle, merging the server-wide
 * properties with the client-supplied ones (client values win).
 *
 * <p>Synchronized to make the exists-check and put atomic with respect to
 * concurrent openConnection calls.
 */
@Override public void openConnection(ConnectionHandle ch,
    Map<String, String> info) {
  Properties fullInfo = new Properties();
  fullInfo.putAll(this.info);
  if (info != null) {
    fullInfo.putAll(info);
  }
  synchronized (this) {
    try {
      if (connectionCache.asMap().containsKey(ch.id)) {
        throw new RuntimeException("Connection already exists: " + ch.id);
      }
      Connection conn = DriverManager.getConnection(url, fullInfo);
      connectionCache.put(ch.id, conn);
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
  }
}
/**
 * Closes the connection and evicts it from the cache. Unknown handles are
 * logged and ignored, making the call idempotent.
 */
@Override public void closeConnection(ConnectionHandle ch) {
  Connection conn = connectionCache.getIfPresent(ch.id);
  if (conn == null) {
    LOG.debug("client requested close unknown connection {}", ch);
    return;
  }
  LOG.trace("closing connection {}", ch);
  try {
    conn.close();
  } catch (SQLException e) {
    throw propagate(e);
  } finally {
    // Evict even when close() failed, so the broken connection is not reused.
    connectionCache.invalidate(ch.id);
  }
}
/**
 * Applies the non-null connection properties to the JDBC connection; null
 * entries mean "leave the current setting unchanged".
 */
protected void apply(Connection conn, ConnectionProperties connProps)
    throws SQLException {
  if (connProps.isAutoCommit() != null) {
    conn.setAutoCommit(connProps.isAutoCommit());
  }
  if (connProps.isReadOnly() != null) {
    conn.setReadOnly(connProps.isReadOnly());
  }
  if (connProps.getTransactionIsolation() != null) {
    conn.setTransactionIsolation(connProps.getTransactionIsolation());
  }
  if (connProps.getCatalog() != null) {
    conn.setCatalog(connProps.getCatalog());
  }
  if (connProps.getSchema() != null) {
    conn.setSchema(connProps.getSchema());
  }
}
/**
 * Merges client-requested properties into the connection's current state,
 * applies any changed (dirty) settings, and returns the resulting snapshot.
 */
@Override public ConnectionProperties connectionSync(ConnectionHandle ch,
    ConnectionProperties connProps) {
  LOG.trace("syncing properties for connection {}", ch);
  try {
    Connection conn = getConnection(ch.id);
    ConnectionPropertiesImpl props = new ConnectionPropertiesImpl(conn).merge(connProps);
    if (props.isDirty()) {
      apply(conn, props);
      props.setDirty(false);
    }
    return props;
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/**
 * Rethrows {@code e} unchanged when it is already unchecked, otherwise wraps
 * it in a RuntimeException (preserving the cause). Always throws; the
 * RuntimeException return type only lets callers write {@code throw
 * propagate(e)} to satisfy the compiler.
 */
RuntimeException propagate(Throwable e) {
  if (e instanceof RuntimeException) {
    throw (RuntimeException) e;
  } else if (e instanceof Error) {
    throw (Error) e;
  } else {
    throw new RuntimeException(e);
  }
}
/**
 * Prepares {@code sql} on the client's connection, caches the prepared
 * statement under a fresh id, and returns a handle carrying its signature.
 *
 * <p>NOTE(review): {@code maxRowCount} is not applied here — presumably it
 * is enforced at execute time; confirm against callers.
 */
public StatementHandle prepare(ConnectionHandle ch, String sql,
    long maxRowCount) {
  try {
    final Connection conn = getConnection(ch.id);
    final PreparedStatement statement = conn.prepareStatement(sql);
    final int id = statementIdGenerator.getAndIncrement();
    Meta.StatementType statementType = null;
    // When the underlying driver is itself Avatica, recover the parsed
    // statement type so the signature is more precise.
    if (statement.isWrapperFor(AvaticaPreparedStatement.class)) {
      final AvaticaPreparedStatement avaticaPreparedStatement;
      avaticaPreparedStatement =
          statement.unwrap(AvaticaPreparedStatement.class);
      statementType = avaticaPreparedStatement.getStatementType();
    }
    statementCache.put(id, new StatementInfo(statement));
    StatementHandle h = new StatementHandle(ch.id, id,
        signature(statement.getMetaData(), statement.getParameterMetaData(),
            sql, statementType));
    LOG.trace("prepared statement {}", h);
    return h;
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/**
 * Executes {@code sql} on the cached statement for {@code h} and returns
 * either the first frame of results or, for update statements, a result set
 * that carries only the update count.
 *
 * @throws NoSuchStatementException if the statement expired or never existed
 */
public ExecuteResult prepareAndExecute(StatementHandle h, String sql,
    long maxRowCount, PrepareCallback callback) throws NoSuchStatementException {
  try {
    final StatementInfo info = statementCache.getIfPresent(h.id);
    if (info == null) {
      throw new NoSuchStatementException(h);
    }
    final Statement statement = info.statement;

    // Special handling of maxRowCount as JDBC 0 is unlimited, our meta 0 row
    if (maxRowCount > 0) {
      AvaticaUtils.setLargeMaxRows(statement, maxRowCount);
    } else if (maxRowCount < 0) {
      // Negative (e.g. UNLIMITED_COUNT) maps to JDBC's "unlimited" (0).
      statement.setMaxRows(0);
    }
    boolean ret = statement.execute(sql);
    info.setResultSet(statement.getResultSet());
    // Either execute(sql) returned true or the resultSet was null
    assert ret || null == info.getResultSet();
    final List<MetaResultSet> resultSets = new ArrayList<>();
    if (null == info.getResultSet()) {
      // Create a special result set that just carries update count
      resultSets.add(
          JdbcResultSet.count(h.connectionId, h.id,
              AvaticaUtils.getLargeUpdateCount(statement)));
    } else {
      resultSets.add(
          JdbcResultSet.create(h.connectionId, h.id, info.getResultSet(), maxRowCount));
    }
    LOG.trace("prepAndExec statement {}", h);
    // TODO: review client to ensure statementId is updated when appropriate
    return new ExecuteResult(resultSets);
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/**
 * Re-executes the query captured in {@code state} to recreate the
 * statement's ResultSet (e.g. after a server restart or cache expiry), then
 * advances it to {@code offset}.
 *
 * @return true if the result set exists and was advanced; false when there
 *     are no results to sync
 * @throws NoSuchStatementException if the statement is not cached
 */
public boolean syncResults(StatementHandle sh, QueryState state, long offset)
    throws NoSuchStatementException {
  try {
    final Connection conn = getConnection(sh.connectionId);
    final StatementInfo info = statementCache.getIfPresent(sh.id);
    if (null == info) {
      throw new NoSuchStatementException(sh);
    }
    final Statement statement = info.statement;
    // Let the state recreate the necessary ResultSet on the Statement
    info.setResultSet(state.invoke(conn, statement));
    if (null != info.getResultSet()) {
      // If it is non-null, try to advance to the requested offset.
      return info.advanceResultSetToOffset(info.getResultSet(), offset);
    }
    // No results, nothing to do. Client can move on.
    return false;
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/**
 * Fetches up to {@code fetchMaxRowCount} rows starting at {@code offset}
 * from the statement's current ResultSet.
 *
 * @throws NoSuchStatementException if the statement expired or never existed
 * @throws MissingResultsException if the statement exists but its ResultSet
 *     was never initialized — the client must call syncResults first
 */
public Frame fetch(StatementHandle h, long offset, int fetchMaxRowCount) throws
    NoSuchStatementException, MissingResultsException {
  LOG.trace("fetching {} offset:{} fetchMaxRowCount:{}", h, offset, fetchMaxRowCount);
  try {
    final StatementInfo statementInfo = statementCache.getIfPresent(h.id);
    if (null == statementInfo) {
      // Statement might have expired, or never existed on this server.
      throw new NoSuchStatementException(h);
    }
    if (!statementInfo.isResultSetInitialized()) {
      // The Statement exists, but the results are missing. Need to call syncResults(...)
      throw new MissingResultsException(h);
    }
    if (statementInfo.getResultSet() == null) {
      // Initialized but empty (e.g. an update statement): nothing to fetch.
      return Frame.EMPTY;
    } else {
      return JdbcResultSet.frame(statementInfo, statementInfo.getResultSet(), offset,
          fetchMaxRowCount, calendar);
    }
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/**
 * Converts a list of table types to the {@code String[]} form expected by
 * {@link DatabaseMetaData#getTables}, preserving a null input as null
 * (JDBC interprets a null type array as "all types").
 */
private static String[] toArray(List<String> typeList) {
  if (typeList == null) {
    return null;
  }
  // Zero-length seed array: the collection allocates the correctly sized
  // result itself — the preferred modern idiom over new String[size()].
  return typeList.toArray(new String[0]);
}
/**
 * Binds {@code parameterValues} to the cached PreparedStatement for
 * {@code h}, executes it, and returns either the query results or an
 * update-count result.
 *
 * @throws NoSuchStatementException if the statement is not cached
 */
@Override public ExecuteResult execute(StatementHandle h,
    List<TypedValue> parameterValues, long maxRowCount) throws NoSuchStatementException {
  try {
    // All parameters must be bound before execution.
    if (MetaImpl.checkParameterValueHasNull(parameterValues)) {
      throw new SQLException("exception while executing query: unbound parameter");
    }
    final StatementInfo statementInfo = statementCache.getIfPresent(h.id);
    if (null == statementInfo) {
      throw new NoSuchStatementException(h);
    }
    final List<MetaResultSet> resultSets;
    final PreparedStatement preparedStatement =
        (PreparedStatement) statementInfo.statement;
    if (parameterValues != null) {
      // JDBC parameter indices are 1-based.
      for (int i = 0; i < parameterValues.size(); i++) {
        TypedValue o = parameterValues.get(i);
        preparedStatement.setObject(i + 1, o.toJdbc(calendar));
      }
    }
    if (preparedStatement.execute()) {
      final Meta.Frame frame;
      final Signature signature2;
      if (preparedStatement.isWrapperFor(AvaticaPreparedStatement.class)) {
        // Avatica-backed statements already carry an accurate signature.
        signature2 = h.signature;
      } else {
        // Recompute the signature from post-execution metadata.
        h.signature = signature(preparedStatement.getMetaData(),
            preparedStatement.getParameterMetaData(), h.signature.sql,
            Meta.StatementType.SELECT);
        signature2 = h.signature;
      }
      // Make sure we set this for subsequent fetch()'s to find the result set.
      statementInfo.setResultSet(preparedStatement.getResultSet());
      if (statementInfo.getResultSet() == null) {
        frame = Frame.EMPTY;
        resultSets = Collections.<MetaResultSet>singletonList(
            JdbcResultSet.empty(h.connectionId, h.id, signature2));
      } else {
        resultSets = Collections.<MetaResultSet>singletonList(
            JdbcResultSet.create(h.connectionId, h.id, statementInfo.getResultSet(),
                maxRowCount, signature2));
      }
    } else {
      // Non-query statement: report the update count only.
      resultSets = Collections.<MetaResultSet>singletonList(
          JdbcResultSet.count(h.connectionId, h.id, preparedStatement.getUpdateCount()));
    }
    return new ExecuteResult(resultSets);
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/** Commits the current transaction on the client's connection. */
@Override public void commit(ConnectionHandle ch) {
  try {
    final Connection conn = getConnection(ch.id);
    conn.commit();
  } catch (SQLException e) {
    throw propagate(e);
  }
}

/** Rolls back the current transaction on the client's connection. */
@Override public void rollback(ConnectionHandle ch) {
  try {
    final Connection conn = getConnection(ch.id);
    conn.rollback();
  } catch (SQLException e) {
    throw propagate(e);
  }
}
/** Configurable statement cache settings. */
public enum StatementCacheSettings {
  /** JDBC connection property for setting connection cache concurrency level. */
  CONCURRENCY_LEVEL(STMT_CACHE_KEY_BASE + ".concurrency", "100"),

  /** JDBC connection property for setting connection cache initial capacity. */
  INITIAL_CAPACITY(STMT_CACHE_KEY_BASE + ".initialcapacity", "1000"),

  /** JDBC connection property for setting connection cache maximum capacity. */
  MAX_CAPACITY(STMT_CACHE_KEY_BASE + ".maxcapacity", "10000"),

  /** JDBC connection property for setting connection cache expiration duration.
   *
   * <p>Used in conjunction with {@link #EXPIRY_UNIT}.</p>
   */
  // NOTE(review): the key string contains a typo ("expirydiration"); it is
  // kept as-is because correcting it would silently break existing
  // configurations that use the misspelled key — confirm before fixing.
  EXPIRY_DURATION(STMT_CACHE_KEY_BASE + ".expirydiration", "5"),

  /** JDBC connection property for setting connection cache expiration unit.
   *
   * <p>Used in conjunction with {@link #EXPIRY_DURATION}.</p>
   */
  EXPIRY_UNIT(STMT_CACHE_KEY_BASE + ".expiryunit", TimeUnit.MINUTES.name());

  private final String key;
  private final String defaultValue;

  StatementCacheSettings(String key, String defaultValue) {
    this.key = key;
    this.defaultValue = defaultValue;
  }

  /** The configuration key for specifying this setting. */
  public String key() {
    return key;
  }

  /** The default value for this setting. */
  public String defaultValue() {
    return defaultValue;
  }
}
/** Configurable connection cache settings. */
public enum ConnectionCacheSettings {
  /** JDBC connection property for setting connection cache concurrency level. */
  CONCURRENCY_LEVEL(CONN_CACHE_KEY_BASE + ".concurrency", "10"),

  /** JDBC connection property for setting connection cache initial capacity. */
  INITIAL_CAPACITY(CONN_CACHE_KEY_BASE + ".initialcapacity", "100"),

  /** JDBC connection property for setting connection cache maximum capacity. */
  MAX_CAPACITY(CONN_CACHE_KEY_BASE + ".maxcapacity", "1000"),

  /** JDBC connection property for setting connection cache expiration duration. */
  EXPIRY_DURATION(CONN_CACHE_KEY_BASE + ".expiryduration", "10"),

  /** JDBC connection property for setting connection cache expiration unit. */
  EXPIRY_UNIT(CONN_CACHE_KEY_BASE + ".expiryunit", TimeUnit.MINUTES.name());

  private final String key;
  private final String defaultValue;

  ConnectionCacheSettings(String key, String defaultValue) {
    this.key = key;
    this.defaultValue = defaultValue;
  }

  /** The configuration key for specifying this setting. */
  public String key() {
    return key;
  }

  /** The default value for this setting. */
  public String defaultValue() {
    return defaultValue;
  }
}
/** Callback for {@link #connectionCache} member expiration. */
private class ConnectionExpiryHandler
implements RemovalListener<String, Connection> {
public void onRemoval(RemovalNotification<String, Connection> notification) {
String connectionId = notification.getKey();
Connection doomed = notification.getValue();
LOG.debug("Expiring connection {} because {}", connectionId, notification.getCause());
try {
if (doomed != null) {
doomed.close();
}
} catch (Throwable t) {
LOG.info("Exception thrown while expiring connection {}", connectionId, t);
}
}
}
/** Callback for {@link #statementCache} member expiration. */
private class StatementExpiryHandler
implements RemovalListener<Integer, StatementInfo> {
public void onRemoval(RemovalNotification<Integer, StatementInfo> notification) {
Integer stmtId = notification.getKey();
StatementInfo doomed = notification.getValue();
if (doomed == null) {
// log/throw?
return;
}
LOG.debug("Expiring statement {} because {}", stmtId, notification.getCause());
try {
if (doomed.getResultSet() != null) {
doomed.getResultSet().close();
}
if (doomed.statement != null) {
doomed.statement.close();
}
} catch (Throwable t) {
LOG.info("Exception thrown while expiring statement {}", stmtId, t);
}
}
}
}
// End JdbcMeta.java
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.common.block;
import io.airlift.slice.SliceInput;
import io.airlift.slice.SliceOutput;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.function.BiConsumer;
import static com.facebook.presto.common.block.BlockUtil.calculateBlockResetSize;
import static com.facebook.presto.common.block.BlockUtil.checkArrayRange;
import static com.facebook.presto.common.block.BlockUtil.checkValidRegion;
import static com.facebook.presto.common.block.BlockUtil.countUsedPositions;
import static com.facebook.presto.common.block.BlockUtil.internalPositionInRange;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.Math.max;
import static java.lang.Math.toIntExact;
public class LongArrayBlockBuilder
implements BlockBuilder
{
private static final int INSTANCE_SIZE = ClassLayout.parseClass(LongArrayBlockBuilder.class).instanceSize();
private static final Block NULL_VALUE_BLOCK = new LongArrayBlock(0, 1, new boolean[] {true}, new long[1]);
@Nullable
private BlockBuilderStatus blockBuilderStatus;
private boolean initialized;
private int initialEntryCount;
private int positionCount;
private boolean hasNullValue;
private boolean hasNonNullValue;
// it is assumed that these arrays are the same length
private boolean[] valueIsNull = new boolean[0];
private long[] values = new long[0];
private long retainedSizeInBytes;
/**
 * Creates a builder sized lazily: arrays stay empty until the first write,
 * then grow to {@code expectedEntries} (at least 1).
 *
 * @param blockBuilderStatus optional accounting sink for bytes written
 * @param expectedEntries hint for the initial array capacity
 */
public LongArrayBlockBuilder(@Nullable BlockBuilderStatus blockBuilderStatus, int expectedEntries)
{
    this.blockBuilderStatus = blockBuilderStatus;
    this.initialEntryCount = max(expectedEntries, 1);
    updateDataSize();
}
// Appends a non-null long value at the current position, growing the
// arrays on demand and reporting the per-position cost (value + null flag)
// to the builder status.
@Override
public BlockBuilder writeLong(long value)
{
    if (values.length <= positionCount) {
        growCapacity();
    }
    values[positionCount] = value;
    hasNonNullValue = true;
    positionCount++;
    if (blockBuilderStatus != null) {
        blockBuilderStatus.addBytes(Byte.BYTES + Long.BYTES);
    }
    return this;
}
// No-op: a long entry is complete as soon as writeLong stores it.
@Override
public BlockBuilder closeEntry()
{
    return this;
}
// Appends a null at the current position; the corresponding values slot is
// left at its default (0) and is never read for null positions.
@Override
public BlockBuilder appendNull()
{
    if (values.length <= positionCount) {
        growCapacity();
    }
    valueIsNull[positionCount] = true;
    hasNullValue = true;
    positionCount++;
    if (blockBuilderStatus != null) {
        blockBuilderStatus.addBytes(Byte.BYTES + Long.BYTES);
    }
    return this;
}
// Builds the finished block. An all-null builder is encoded compactly as a
// run-length block over a single shared null value; otherwise the null mask
// is passed only when at least one null exists.
@Override
public Block build()
{
    if (!hasNonNullValue) {
        return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount);
    }
    return new LongArrayBlock(0, positionCount, hasNullValue ? valueIsNull : null, values);
}
// Creates an empty builder of the same type, sized from this builder's
// current position count.
@Override
public BlockBuilder newBlockBuilderLike(BlockBuilderStatus blockBuilderStatus)
{
    return new LongArrayBlockBuilder(blockBuilderStatus, calculateBlockResetSize(positionCount));
}
// Grows both parallel arrays: the first growth jumps straight to the
// expected-entries hint; later growths follow the standard resize policy.
private void growCapacity()
{
    int newSize;
    if (initialized) {
        newSize = BlockUtil.calculateNewArraySize(values.length);
    }
    else {
        newSize = initialEntryCount;
        initialized = true;
    }
    valueIsNull = Arrays.copyOf(valueIsNull, newSize);
    values = Arrays.copyOf(values, newSize);
    updateDataSize();
}
// Recomputes retained size: object header plus both arrays, plus the
// builder-status instance when one is attached.
private void updateDataSize()
{
    retainedSizeInBytes = INSTANCE_SIZE + sizeOf(valueIsNull) + sizeOf(values);
    if (blockBuilderStatus != null) {
        retainedSizeInBytes += BlockBuilderStatus.INSTANCE_SIZE;
    }
}
@Override
public long getSizeInBytes()
{
return (Long.BYTES + Byte.BYTES) * (long) positionCount;
}
@Override
public long getRegionSizeInBytes(int position, int length)
{
return (Long.BYTES + Byte.BYTES) * (long) length;
}
/**
 * Logical data size of the positions flagged true in {@code positions}.
 */
@Override
public long getPositionsSizeInBytes(boolean[] positions)
{
    return (long) countUsedPositions(positions) * (Long.BYTES + Byte.BYTES);
}
@Override
public long getRetainedSizeInBytes()
{
// Precomputed by updateDataSize(); includes array capacity, not just used slots.
return retainedSizeInBytes;
}
@Override
public long getEstimatedDataSizeForStats(int position)
{
// Nulls contribute nothing to statistics; otherwise one long.
return isNull(position) ? 0 : Long.BYTES;
}
// Reports each retained component (value array, null flags, the builder itself)
// to the consumer so callers can attribute memory usage per object.
@Override
public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
{
consumer.accept(values, sizeOf(values));
consumer.accept(valueIsNull, sizeOf(valueIsNull));
consumer.accept(this, (long) INSTANCE_SIZE);
}
@Override
public int getPositionCount()
{
// Number of entries appended so far (nulls included).
return positionCount;
}
// Returns the long at the given position after bounds-checking it.
@Override
public long getLong(int position)
{
checkReadablePosition(position);
return values[position];
}
// Narrowing accessor; throws ArithmeticException (via toIntExact) if the value
// does not fit in an int.
@Override
@Deprecated
// TODO: Remove when we fix intermediate types on aggregations.
public int getInt(int position)
{
checkReadablePosition(position);
return toIntExact(values[position]);
}
/**
 * Narrowing accessor kept only for legacy aggregation intermediates.
 *
 * @throws ArithmeticException if the stored value does not fit in a short
 */
@Override
@Deprecated
// TODO: Remove when we fix intermediate types on aggregations.
public short getShort(int position)
{
    checkReadablePosition(position);
    long raw = values[position];
    short narrowed = (short) raw;
    if (narrowed != raw) {
        throw new ArithmeticException("short overflow");
    }
    return narrowed;
}
/**
 * Narrowing accessor kept only for legacy aggregation intermediates.
 *
 * @throws ArithmeticException if the stored value does not fit in a byte
 */
@Override
@Deprecated
// TODO: Remove when we fix intermediate types on aggregations.
public byte getByte(int position)
{
    checkReadablePosition(position);
    long raw = values[position];
    byte narrowed = (byte) raw;
    if (narrowed != raw) {
        throw new ArithmeticException("byte overflow");
    }
    return narrowed;
}
@Override
public boolean mayHaveNull()
{
// True once appendNull() has been called at least once.
return hasNullValue;
}
// Bounds-checked null test for a single position.
@Override
public boolean isNull(int position)
{
checkReadablePosition(position);
return valueIsNull[position];
}
// Copies one position into another builder. NOTE(review): nulls are not
// propagated here — the value slot is written as-is; presumably callers only
// use this for non-null positions. TODO confirm against Block contract.
@Override
public void writePositionTo(int position, BlockBuilder blockBuilder)
{
checkReadablePosition(position);
blockBuilder.writeLong(values[position]);
blockBuilder.closeEntry();
}
/**
 * Serializes one position: a presence byte (0 = null, 1 = present) followed
 * by the long value when present. Mirrors {@code readPositionFrom}.
 */
@Override
public void writePositionTo(int position, SliceOutput output)
{
    if (isNull(position)) {
        output.writeByte(0);
        return;
    }
    output.writeByte(1);
    output.writeLong(values[position]);
}
/**
 * Deserializes one position written by {@code writePositionTo(int, SliceOutput)}:
 * a presence byte, then the value when present.
 */
@Override
public BlockBuilder readPositionFrom(SliceInput input)
{
    if (input.readByte() == 0) {
        appendNull();
    }
    else {
        writeLong(input.readLong());
        closeEntry();
    }
    return this;
}
/**
 * Copies a single position into its own one-entry immutable block.
 */
@Override
public Block getSingleValueBlock(int position)
{
    checkReadablePosition(position);
    boolean[] nullFlag = valueIsNull[position] ? new boolean[] {true} : null;
    return new LongArrayBlock(0, 1, nullFlag, new long[] {values[position]});
}
/**
 * Copies an arbitrary selection of positions into a new compact block.
 *
 * @param positions source positions; entries [offset, offset+length) are used
 */
@Override
public Block copyPositions(int[] positions, int offset, int length)
{
    checkArrayRange(positions, offset, length);
    if (!hasNonNullValue) {
        // Only nulls were ever appended — nothing to copy.
        return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
    }
    long[] copiedValues = new long[length];
    boolean[] copiedNulls = hasNullValue ? new boolean[length] : null;
    for (int i = 0; i < length; i++) {
        int position = positions[offset + i];
        checkReadablePosition(position);
        copiedValues[i] = values[position];
        if (copiedNulls != null) {
            copiedNulls[i] = valueIsNull[position];
        }
    }
    return new LongArrayBlock(0, length, copiedNulls, copiedValues);
}
/**
 * Returns a zero-copy view over a contiguous region; the result shares the
 * underlying arrays with this builder.
 */
@Override
public Block getRegion(int positionOffset, int length)
{
    checkValidRegion(getPositionCount(), positionOffset, length);
    if (!hasNonNullValue) {
        return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
    }
    boolean[] nulls = hasNullValue ? valueIsNull : null;
    return new LongArrayBlock(positionOffset, length, nulls, values);
}
/**
 * Copies a contiguous region into a new block with freshly allocated arrays.
 */
@Override
public Block copyRegion(int positionOffset, int length)
{
    checkValidRegion(getPositionCount(), positionOffset, length);
    if (!hasNonNullValue) {
        return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
    }
    int from = positionOffset;
    int to = positionOffset + length;
    boolean[] copiedNulls = hasNullValue ? Arrays.copyOfRange(valueIsNull, from, to) : null;
    return new LongArrayBlock(0, length, copiedNulls, Arrays.copyOfRange(values, from, to));
}
@Override
public String getEncodingName()
{
// Identifies the serde used when this block is shipped over the wire.
return LongArrayBlockEncoding.NAME;
}
/**
 * Debug representation; intentionally omits the (possibly large) value arrays.
 */
@Override
public String toString()
{
    return "LongArrayBlockBuilder{positionCount=" + getPositionCount() + '}';
}
/**
 * Validates that {@code position} addresses an entry that has been written.
 *
 * @throws IllegalArgumentException if position is negative or not yet written;
 *         the message includes the offending value to make failures actionable
 */
private void checkReadablePosition(int position)
{
    if (position < 0 || position >= getPositionCount()) {
        throw new IllegalArgumentException("position is not valid: " + position);
    }
}
// Fast-path null test that skips bounds checks; callers guarantee validity.
// The asserts only fire when the JVM runs with -ea.
@Override
public boolean isNullUnchecked(int internalPosition)
{
assert mayHaveNull() : "no nulls present";
assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
return valueIsNull[internalPosition];
}
// Fast-path accessor that skips bounds checks; callers guarantee validity.
@Override
public long getLongUnchecked(int internalPosition)
{
assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
return values[internalPosition];
}
@Override
public int getOffsetBase()
{
// Builders always index from zero; only built region views carry an offset.
return 0;
}
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInsight.NullableNotNullManager;
import com.intellij.codeInspection.dataFlow.MethodContract.ValueConstraint;
import com.intellij.codeInspection.dataFlow.instructions.MethodCallInstruction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.SideEffectChecker;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import static com.intellij.codeInspection.dataFlow.MethodContract.ValueConstraint.*;
/**
* @author peter
*/
/**
 * Entry point for inferring {@code @Contract}-style method contracts from source.
 * Results are cached per method and invalidated on Java structure changes.
 *
 * @author peter
 */
public class ContractInference {
  public static final int MAX_CONTRACT_COUNT = 10;

  @NotNull
  public static List<MethodContract> inferContracts(@NotNull final PsiMethod method) {
    if (!InferenceFromSourceUtil.shouldInferFromSource(method)) {
      return Collections.emptyList();
    }
    return CachedValuesManager.getCachedValue(method, () -> {
      // Guard against self-referential delegation chains blowing the stack.
      List<MethodContract> inferred = RecursionManager.doPreventingRecursion(
        method, true, () -> new ContractInferenceInterpreter(method).inferContracts());
      return CachedValueProvider.Result.create(
        inferred == null ? Collections.<MethodContract>emptyList() : inferred,
        method, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
    });
  }
}
/**
 * Abstract interpreter that walks a method body symbolically, tracking per-parameter
 * value constraints (null/not-null/true/false), and emits candidate MethodContracts.
 * A "state" is one ValueConstraint[] — one slot per parameter.
 */
class ContractInferenceInterpreter {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.dataFlow.ContractInferenceInterpreter");
private final PsiMethod myMethod;
// All-ANY_VALUE constraint array sized to the method's parameter count.
private final ValueConstraint[] myEmptyConstraints;
public ContractInferenceInterpreter(PsiMethod method) {
myMethod = method;
myEmptyConstraints = MethodContract.createConstraintArray(myMethod.getParameterList().getParametersCount());
}
/**
 * Infers contracts, then post-processes: boxes boolean return values for
 * reference-returning methods, drops contracts incompatible with the return
 * type or a @NotNull annotation, and caps the list at MAX_CONTRACT_COUNT.
 */
List<MethodContract> inferContracts() {
List<MethodContract> contracts = doInferContracts();
if (contracts.isEmpty()) return contracts;
PsiTypeElement typeElement = myMethod.getReturnTypeElement();
final PsiType returnType = typeElement == null ? null : typeElement.getType();
boolean referenceTypeReturned = !(returnType instanceof PsiPrimitiveType);
final boolean notNull = referenceTypeReturned &&
NullableNotNullManager.getInstance(myMethod.getProject()).isNotNull(myMethod, false);
if (referenceTypeReturned) {
contracts = boxReturnValues(contracts);
}
List<MethodContract> compatible = ContainerUtil.filter(contracts, contract -> {
// "-> !null" is redundant on a method already annotated @NotNull.
if (notNull && contract.returnValue == NOT_NULL_VALUE) {
return false;
}
return InferenceFromSourceUtil.isReturnTypeCompatible(returnType, contract.returnValue);
});
if (compatible.size() > ContractInference.MAX_CONTRACT_COUNT) {
LOG.debug("Too many contracts for " + PsiUtil.getMemberQualifiedName(myMethod) + ", shrinking the list");
return compatible.subList(0, ContractInference.MAX_CONTRACT_COUNT);
}
return compatible;
}
/**
 * For reference-returning methods, a literal true/false return is autoboxed,
 * so the contract's return value becomes "!null" rather than true/false.
 */
@NotNull
private static List<MethodContract> boxReturnValues(List<MethodContract> contracts) {
return ContainerUtil.mapNotNull(contracts, contract -> {
if (contract.returnValue == FALSE_VALUE || contract.returnValue == TRUE_VALUE) {
return new MethodContract(contract.arguments, NOT_NULL_VALUE);
}
return contract;
});
}
/**
 * Raw inference: tries the cheap single-statement delegation pattern first,
 * then falls back to the general statement walk.
 */
private List<MethodContract> doInferContracts() {
PsiCodeBlock body = myMethod.getBody();
PsiStatement[] statements = body == null ? PsiStatement.EMPTY_ARRAY : body.getStatements();
if (statements.length == 0) return Collections.emptyList();
if (statements.length == 1) {
if (statements[0] instanceof PsiReturnStatement) {
List<MethodContract> result = handleDelegation(((PsiReturnStatement)statements[0]).getReturnValue(), false);
if (result != null) {
return result;
}
}
else if (statements[0] instanceof PsiExpressionStatement && ((PsiExpressionStatement)statements[0]).getExpression() instanceof PsiMethodCallExpression) {
List<MethodContract> result = handleDelegation(((PsiExpressionStatement)statements[0]).getExpression(), false);
if (result != null) return result;
}
}
return visitStatements(Collections.singletonList(myEmptyConstraints), statements);
}
/**
 * Unwraps parentheses and '!' prefixes around a delegated call;
 * returns null when the expression is not a plain method call.
 */
@Nullable
private List<MethodContract> handleDelegation(final PsiExpression expression, final boolean negated) {
if (expression instanceof PsiParenthesizedExpression) {
return handleDelegation(((PsiParenthesizedExpression)expression).getExpression(), negated);
}
if (expression instanceof PsiPrefixExpression && ((PsiPrefixExpression)expression).getOperationTokenType() == JavaTokenType.EXCL) {
return handleDelegation(((PsiPrefixExpression)expression).getOperand(), !negated);
}
if (expression instanceof PsiMethodCallExpression) {
return handleCallDelegation((PsiMethodCallExpression)expression, negated);
}
return null;
}
/**
 * Translates the delegate's contracts into this method's parameter space by
 * mapping each delegate argument back to a caller parameter or literal.
 * A contract is dropped (null) when the mapping is impossible.
 */
private List<MethodContract> handleCallDelegation(PsiMethodCallExpression expression, final boolean negated) {
JavaResolveResult result = expression.resolveMethodGenerics();
final PsiMethod targetMethod = (PsiMethod)result.getElement();
if (targetMethod == null) return Collections.emptyList();
final PsiParameter[] parameters = targetMethod.getParameterList().getParameters();
final PsiExpression[] arguments = expression.getArgumentList().getExpressions();
final boolean varArgCall = MethodCallInstruction.isVarArgCall(targetMethod, result.getSubstitutor(), arguments, parameters);
final boolean notNull = NullableNotNullManager.isNotNull(targetMethod);
List<MethodContract> fromDelegate = ContainerUtil.mapNotNull(ControlFlowAnalyzer.getMethodContracts(targetMethod), delegateContract -> {
ValueConstraint[] answer = myEmptyConstraints;
for (int i = 0; i < delegateContract.arguments.length; i++) {
if (i >= arguments.length) return null;
ValueConstraint argConstraint = delegateContract.arguments[i];
if (argConstraint != ANY_VALUE) {
// Varargs tail: arguments packed into the array can't be null-constrained.
if (varArgCall && i >= parameters.length - 1) {
if (argConstraint == NULL_VALUE) {
return null;
}
break;
}
int paramIndex = resolveParameter(arguments[i]);
if (paramIndex < 0) {
// Not a parameter reference: a literal must already satisfy the constraint.
if (argConstraint != getLiteralConstraint(arguments[i])) {
return null;
}
}
else {
answer = withConstraint(answer, paramIndex, argConstraint);
if (answer == null) {
return null;
}
}
}
}
ValueConstraint returnValue = negated ? negateConstraint(delegateContract.returnValue) : delegateContract.returnValue;
if (notNull && returnValue != THROW_EXCEPTION) {
returnValue = NOT_NULL_VALUE;
}
return answer == null ? null : new MethodContract(answer, returnValue);
});
if (notNull) {
return ContainerUtil.concat(fromDelegate, Collections.singletonList(new MethodContract(myEmptyConstraints, NOT_NULL_VALUE)));
}
return fromDelegate;
}
/**
 * Evaluates an expression under every incoming state, producing contracts whose
 * antecedents are refined states and whose consequent is the expression's value.
 */
@NotNull
private List<MethodContract> visitExpression(final List<ValueConstraint[]> states, @Nullable PsiExpression expr) {
if (states.isEmpty()) return Collections.emptyList();
if (states.size() > 300) return Collections.emptyList(); // too complex
if (expr instanceof PsiPolyadicExpression) {
PsiExpression[] operands = ((PsiPolyadicExpression)expr).getOperands();
IElementType op = ((PsiPolyadicExpression)expr).getOperationTokenType();
if (operands.length == 2 && (op == JavaTokenType.EQEQ || op == JavaTokenType.NE)) {
return visitEqualityComparison(states, operands[0], operands[1], op == JavaTokenType.EQEQ);
}
if (op == JavaTokenType.ANDAND || op == JavaTokenType.OROR) {
return visitLogicalOperation(operands, op == JavaTokenType.ANDAND, states);
}
}
if (expr instanceof PsiConditionalExpression) {
// Split states by the condition's outcome and visit each branch separately.
List<MethodContract> conditionResults = visitExpression(states, ((PsiConditionalExpression)expr).getCondition());
return ContainerUtil.concat(
visitExpression(antecedentsReturning(conditionResults, TRUE_VALUE), ((PsiConditionalExpression)expr).getThenExpression()),
visitExpression(antecedentsReturning(conditionResults, FALSE_VALUE), ((PsiConditionalExpression)expr).getElseExpression()));
}
if (expr instanceof PsiParenthesizedExpression) {
return visitExpression(states, ((PsiParenthesizedExpression)expr).getExpression());
}
if (expr instanceof PsiTypeCastExpression) {
return visitExpression(states, ((PsiTypeCastExpression)expr).getOperand());
}
if (expr instanceof PsiPrefixExpression && ((PsiPrefixExpression)expr).getOperationTokenType() == JavaTokenType.EXCL) {
// '!' flips boolean outcomes; non-boolean outcomes are discarded.
List<MethodContract> result = ContainerUtil.newArrayList();
for (MethodContract contract : visitExpression(states, ((PsiPrefixExpression)expr).getOperand())) {
if (contract.returnValue == TRUE_VALUE || contract.returnValue == FALSE_VALUE) {
result.add(new MethodContract(contract.arguments, negateConstraint(contract.returnValue)));
}
}
return result;
}
if (expr instanceof PsiInstanceOfExpression) {
// "param instanceof T" is false whenever param is null.
final int parameter = resolveParameter(((PsiInstanceOfExpression)expr).getOperand());
if (parameter >= 0) {
return ContainerUtil.mapNotNull(states, state -> contractWithConstraint(state, parameter, NULL_VALUE, FALSE_VALUE));
}
}
if (expr instanceof PsiNewExpression) {
return toContracts(states, NOT_NULL_VALUE);
}
if (expr instanceof PsiMethodCallExpression) {
PsiMethod method = ((PsiMethodCallExpression)expr).resolveMethod();
if (method != null && NullableNotNullManager.isNotNull(method)) {
return toContracts(states, NOT_NULL_VALUE);
}
}
final ValueConstraint constraint = getLiteralConstraint(expr);
if (constraint != null) {
return toContracts(states, constraint);
}
int paramIndex = resolveParameter(expr);
if (paramIndex >= 0) {
List<MethodContract> result = ContainerUtil.newArrayList();
for (ValueConstraint[] state : states) {
if (state[paramIndex] != ANY_VALUE) {
// the second 'o' reference in cases like: if (o != null) return o;
result.add(new MethodContract(state, state[paramIndex]));
} else if (textMatches(getParameter(paramIndex).getTypeElement(), PsiKeyword.BOOLEAN)) {
// if (boolValue) ...
ContainerUtil.addIfNotNull(result, contractWithConstraint(state, paramIndex, TRUE_VALUE, TRUE_VALUE));
ContainerUtil.addIfNotNull(result, contractWithConstraint(state, paramIndex, FALSE_VALUE, FALSE_VALUE));
}
}
return result;
}
return Collections.emptyList();
}
/**
 * Builds a contract "state + (parameter == paramConstraint) -> returnValue",
 * or null when that refinement contradicts the state.
 */
@Nullable
private MethodContract contractWithConstraint(ValueConstraint[] state,
int parameter, ValueConstraint paramConstraint,
ValueConstraint returnValue) {
ValueConstraint[] newState = withConstraint(state, parameter, paramConstraint);
return newState == null ? null : new MethodContract(newState, returnValue);
}
private static boolean textMatches(@Nullable PsiTypeElement typeElement, @NotNull String text) {
return typeElement != null && typeElement.textMatches(text);
}
/**
 * Handles "param == literal" / "param != literal" in either operand order,
 * emitting contracts for both the matching and the negated constraint.
 */
private List<MethodContract> visitEqualityComparison(List<ValueConstraint[]> states,
PsiExpression op1,
PsiExpression op2,
boolean equality) {
int parameter = resolveParameter(op1);
ValueConstraint constraint = getLiteralConstraint(op2);
if (parameter < 0 || constraint == null) {
parameter = resolveParameter(op2);
constraint = getLiteralConstraint(op1);
}
if (parameter >= 0 && constraint != null) {
List<MethodContract> result = ContainerUtil.newArrayList();
for (ValueConstraint[] state : states) {
if (constraint == NOT_NULL_VALUE) {
// Only meaningful for reference-typed parameters.
if (!(getParameter(parameter).getType() instanceof PsiPrimitiveType)) {
ContainerUtil.addIfNotNull(result, contractWithConstraint(state, parameter, NULL_VALUE, equality ? FALSE_VALUE : TRUE_VALUE));
}
} else {
ContainerUtil.addIfNotNull(result, contractWithConstraint(state, parameter, constraint, equality ? TRUE_VALUE : FALSE_VALUE));
ContainerUtil.addIfNotNull(result, contractWithConstraint(state, parameter, negateConstraint(constraint),
equality ? FALSE_VALUE : TRUE_VALUE));
}
}
return result;
}
return Collections.emptyList();
}
private PsiParameter getParameter(int parameter) {
return myMethod.getParameterList().getParameters()[parameter];
}
// One contract per incoming state, all sharing the same return constraint.
private static List<MethodContract> toContracts(List<ValueConstraint[]> states, ValueConstraint constraint) {
return ContainerUtil.map(states, state -> new MethodContract(state, constraint));
}
/**
 * Short-circuit && / ||: states that hit the "break" value (false for &&,
 * true for ||) finish immediately; the rest flow into the next operand.
 */
private List<MethodContract> visitLogicalOperation(PsiExpression[] operands, boolean conjunction, List<ValueConstraint[]> states) {
ValueConstraint breakValue = conjunction ? FALSE_VALUE : TRUE_VALUE;
List<MethodContract> finalStates = ContainerUtil.newArrayList();
for (PsiExpression operand : operands) {
List<MethodContract> opResults = visitExpression(states, operand);
finalStates.addAll(ContainerUtil.filter(opResults, contract -> contract.returnValue == breakValue));
states = antecedentsReturning(opResults, negateConstraint(breakValue));
}
finalStates.addAll(toContracts(states, negateConstraint(breakValue)));
return finalStates;
}
// Projects out the antecedent states of contracts with the given return value.
private static List<ValueConstraint[]> antecedentsReturning(List<MethodContract> values, ValueConstraint result) {
return ContainerUtil.mapNotNull(values, contract -> contract.returnValue == result ? contract.arguments : null);
}
/**
 * Walks statements under the given states. Only the first "interesting"
 * statement is interpreted; if-without-else and side-effect-free declarations
 * fall through to the next statement with refined states.
 */
@NotNull
private List<MethodContract> visitStatements(List<ValueConstraint[]> states, PsiStatement... statements) {
List<MethodContract> result = ContainerUtil.newArrayList();
for (PsiStatement statement : statements) {
if (statement instanceof PsiBlockStatement) {
result.addAll(visitStatements(states, ((PsiBlockStatement)statement).getCodeBlock().getStatements()));
}
else if (statement instanceof PsiIfStatement) {
List<MethodContract> conditionResults = visitExpression(states, ((PsiIfStatement)statement).getCondition());
PsiStatement thenBranch = ((PsiIfStatement)statement).getThenBranch();
if (thenBranch != null) {
result.addAll(visitStatements(antecedentsReturning(conditionResults, TRUE_VALUE), thenBranch));
}
List<ValueConstraint[]> falseStates = antecedentsReturning(conditionResults, FALSE_VALUE);
PsiStatement elseBranch = ((PsiIfStatement)statement).getElseBranch();
if (elseBranch != null) {
result.addAll(visitStatements(falseStates, elseBranch));
} else {
// No else: continue to the next statement under the condition-false states.
states = falseStates;
continue;
}
}
else if (statement instanceof PsiThrowStatement) {
result.addAll(toContracts(states, THROW_EXCEPTION));
}
else if (statement instanceof PsiReturnStatement) {
result.addAll(visitExpression(states, ((PsiReturnStatement)statement).getReturnValue()));
}
else if (statement instanceof PsiAssertStatement) {
List<MethodContract> conditionResults = visitExpression(states, ((PsiAssertStatement)statement).getAssertCondition());
result.addAll(toContracts(antecedentsReturning(conditionResults, FALSE_VALUE), THROW_EXCEPTION));
}
else if (statement instanceof PsiDeclarationStatement && !mayHaveSideEffects((PsiDeclarationStatement)statement)) {
continue;
}
else if (statement instanceof PsiDoWhileStatement) {
result.addAll(visitStatements(states, ((PsiDoWhileStatement)statement).getBody()));
}
break; // visit only the first statement unless it's 'if' whose 'then' always returns and the next statement is effectively 'else'
}
return result;
}
// True if any declared variable's initializer may have side effects.
private static boolean mayHaveSideEffects(PsiDeclarationStatement statement) {
for (PsiElement element : statement.getDeclaredElements()) {
if (element instanceof PsiVariable) {
PsiExpression initializer = ((PsiVariable)element).getInitializer();
if (initializer != null && SideEffectChecker.mayHaveSideEffects(initializer)) {
return true;
}
}
}
return false;
}
/**
 * Maps a literal to its constraint; any non-true/false/null literal
 * (numbers, strings, chars) is simply not-null.
 */
@Nullable
private static ValueConstraint getLiteralConstraint(@Nullable PsiExpression expr) {
if (expr instanceof PsiLiteralExpression) {
if (expr.textMatches(PsiKeyword.TRUE)) return TRUE_VALUE;
if (expr.textMatches(PsiKeyword.FALSE)) return FALSE_VALUE;
if (expr.textMatches(PsiKeyword.NULL)) return NULL_VALUE;
return NOT_NULL_VALUE;
}
return null;
}
// Self-inverse for the four concrete constraints; others pass through unchanged.
private static ValueConstraint negateConstraint(@NotNull ValueConstraint constraint) {
//noinspection EnumSwitchStatementWhichMissesCases
switch (constraint) {
case NULL_VALUE: return NOT_NULL_VALUE;
case NOT_NULL_VALUE: return NULL_VALUE;
case TRUE_VALUE: return FALSE_VALUE;
case FALSE_VALUE: return TRUE_VALUE;
}
return constraint;
}
/**
 * Resolves an unqualified reference to a parameter index by name,
 * or -1 when it is not a parameter reference.
 */
private int resolveParameter(@Nullable PsiExpression expr) {
if (expr instanceof PsiReferenceExpression && !((PsiReferenceExpression)expr).isQualified()) {
String name = expr.getText();
PsiParameter[] parameters = myMethod.getParameterList().getParameters();
for (int i = 0; i < parameters.length; i++) {
if (name.equals(parameters[i].getName())) {
return i;
}
}
}
return -1;
}
/**
 * Refines one slot of a constraint array. Returns the same array when already
 * satisfied, null on contradiction (including null on a @NotNull parameter),
 * otherwise a fresh copy with the slot updated.
 */
@Nullable
private ValueConstraint[] withConstraint(ValueConstraint[] constraints, int index, ValueConstraint constraint) {
if (constraints[index] == constraint) return constraints;
ValueConstraint negated = negateConstraint(constraint);
if (negated != constraint && constraints[index] == negated) {
return null;
}
if (constraint == NULL_VALUE && NullableNotNullManager.isNotNull(getParameter(index))) {
return null;
}
ValueConstraint[] copy = constraints.clone();
copy[index] = constraint;
return copy;
}
}
|
|
/* Copyright 2017 Alfa Financial Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.alfasoftware.morf.metadata;
import static org.alfasoftware.morf.metadata.SchemaUtils.copy;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.ImmutableList;
/**
* Caches a source schema as a bean for efficient reading.
*
* @author Copyright (c) Alfa Financial Software 2010
*/
class SchemaBean implements Schema {
  /**
   * Holds all the tables represented in this schema, keyed case-insensitively.
   */
  private final Map<String, Table> tables = new HashMap<>();
  /**
   * Holds all the views represented in this schema, keyed case-insensitively.
   */
  private final Map<String, View> views = new HashMap<>();

  /**
   * Normalises a name into a map key. Uses {@link Locale#ROOT} so that key
   * derivation is stable regardless of the JVM default locale (e.g. the
   * Turkish dotted/dotless "i" would otherwise produce different keys).
   *
   * @param name the table or view name
   * @return the locale-independent upper-cased key
   */
  private static String key(String name) {
    return name.toUpperCase(Locale.ROOT);
  }

  /**
   * Creates a schema. Views and tables from the schema are cloned.
   *
   * @param schema Schema to copy.
   */
  SchemaBean(Schema schema) {
    super();
    for (Table table : schema.tables()) {
      Table clone = copy(table);
      tables.put(key(clone.getName()), clone);
    }
    for (View view : schema.views()) {
      View clone = copy(view);
      views.put(key(clone.getName()), clone);
    }
  }

  /**
   * Create an empty schema.
   */
  SchemaBean() {
    this(Collections.<Table>emptyList(), Collections.<View>emptyList());
  }

  /**
   * Creates a schema.
   *
   * @param tables The tables included in the schema.
   */
  SchemaBean(Table... tables) {
    this(ImmutableList.copyOf(tables), Collections.<View>emptyList());
  }

  /**
   * Creates a schema.
   *
   * @param views The views included in the schema.
   */
  SchemaBean(View... views) {
    this(Collections.<Table>emptyList(), ImmutableList.copyOf(views));
  }

  /**
   * Creates a schema. Tables and views are referenced, not cloned.
   *
   * @param tables The tables included in the schema.
   * @param views The views included in the schema.
   */
  SchemaBean(Iterable<Table> tables, Iterable<View> views) {
    super();
    for (Table table : tables) {
      this.tables.put(key(table.getName()), table);
    }
    for (View view : views) {
      this.views.put(key(view.getName()), view);
    }
  }

  /**
   * Creates a schema.
   *
   * @param tables The tables included in the schema.
   */
  SchemaBean(Iterable<Table> tables) {
    this(tables, Collections.<View>emptyList());
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.metadata.Schema#getTable(java.lang.String)
   */
  @Override
  public Table getTable(String name) {
    return tables.get(key(name));
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.metadata.Schema#isEmptyDatabase()
   */
  @Override
  public boolean isEmptyDatabase() {
    return tables.isEmpty();
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.metadata.Schema#tableExists(java.lang.String)
   */
  @Override
  public boolean tableExists(String name) {
    return tables.containsKey(key(name));
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.metadata.Schema#tableNames()
   */
  @Override
  public Collection<String> tableNames() {
    // Implemented like this rather than tables.keySet() to retain case
    Set<String> names = new HashSet<>();
    for (Table table : tables.values()) {
      names.add(table.getName());
    }
    return names;
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.metadata.Schema#tables()
   */
  @Override
  public Collection<Table> tables() {
    return tables.values();
  }

  /**
   * @see org.alfasoftware.morf.metadata.Schema#viewExists(java.lang.String)
   */
  @Override
  public boolean viewExists(String name) {
    return views.containsKey(key(name));
  }

  /**
   * @see org.alfasoftware.morf.metadata.Schema#getView(java.lang.String)
   */
  @Override
  public View getView(String name) {
    return views.get(key(name));
  }

  /**
   * @see org.alfasoftware.morf.metadata.Schema#viewNames()
   */
  @Override
  public Collection<String> viewNames() {
    // Implemented like this rather than views.keySet() to retain case
    Set<String> names = new HashSet<>();
    for (View view : views.values()) {
      names.add(view.getName());
    }
    return names;
  }

  /**
   * @see org.alfasoftware.morf.metadata.Schema#views()
   */
  @Override
  public Collection<View> views() {
    return views.values();
  }

  /**
   * @return a short human-readable summary of the schema size.
   */
  @Override
  public String toString() {
    return "Schema[" + tables().size() + " tables, " + views().size() + " views]";
  }
}
|
|
package eu.se_bastiaan.tvnl.ui.fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.devbrackets.android.exomedia.listener.OnCompletionListener;
import com.devbrackets.android.exomedia.listener.OnErrorListener;
import com.devbrackets.android.exomedia.listener.OnPreparedListener;
import com.devbrackets.android.exomedia.util.Repeater;
import butterknife.BindView;
import butterknife.ButterKnife;
import eu.se_bastiaan.tvnl.R;
import eu.se_bastiaan.tvnl.exomedia.TVNLVideoView;
import eu.se_bastiaan.tvnl.model.StreamInfo;
import eu.se_bastiaan.tvnl.ui.dialog.MessageDialogFragment;
import eu.se_bastiaan.tvnl.ui.presenter.VideoPlayerFragPresenter;
import nucleus.factory.PresenterFactory;
import nucleus.factory.ReflectionPresenterFactory;
import nucleus.factory.RequiresPresenter;
import nucleus.view.PresenterLifecycleDelegate;
import nucleus.view.ViewWithPresenter;
/**
 * Retained fragment hosting a TVNLVideoView. It owns the ExoMedia listeners and
 * a progress-poll Repeater, and wires the nucleus presenter lifecycle manually
 * through a PresenterLifecycleDelegate (see the "Presenter section" below).
 */
@RequiresPresenter(VideoPlayerFragPresenter.class)
public class VideoPlayerFragment extends Fragment implements ViewWithPresenter<VideoPlayerFragPresenter> {
// Host activity callback; only set when the attaching context implements Callback.
// NOTE(review): onActivityCreated dereferences this unconditionally — a host that
// does not implement Callback would NPE there. TODO confirm all hosts implement it.
Callback callback;
View rootView;
StreamInfo streamInfo;
@NonNull
protected Repeater progressPollRepeater = new Repeater();
@BindView(R.id.video_view)
TVNLVideoView videoView;
@BindView(R.id.progress_indicator)
ProgressBar progressIndicator;
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof Callback) callback = (Callback) context;
}
@Override
public void onCreate(Bundle bundle) {
super.onCreate(bundle);
// Restore the presenter's saved state before any view callbacks run.
if (bundle != null)
presenterDelegate.onRestoreInstanceState(bundle.getBundle(PRESENTER_STATE_KEY));
setRetainInstance(true);
setHasOptionsMenu(true);
}
@Override
public void onSaveInstanceState(Bundle bundle) {
super.onSaveInstanceState(bundle);
bundle.putBundle(PRESENTER_STATE_KEY, presenterDelegate.onSaveInstanceState());
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_videoplayer, container, false);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
rootView = view;
ButterKnife.bind(this, view);
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Bail out (finishing the activity) when the host has no stream to play.
streamInfo = callback.getInfo();
if (streamInfo == null){
getActivity().finish();
return;
}
// NOTE(review): setRetainInstance(true) is already called in onCreate(); this
// second call is redundant but harmless.
setRetainInstance(true);
videoView.setVisibility(View.VISIBLE);
}
@Override
public void onResume() {
super.onResume();
// Attach the presenter to this (re)created view and (re)register listeners.
presenterDelegate.onResume(this);
videoView.setOnCompletionListener(onCompletionListener);
videoView.setOnPreparedListener(onPreparedListener);
videoView.setOnErrorListener(onErrorListener);
progressPollRepeater.start();
progressPollRepeater.setRepeatListener(progressPollRepeatListener);
}
@Override
public void onPause() {
super.onPause();
// Stop polling while in the background; playback itself is not paused here.
progressPollRepeater.stop();
progressPollRepeater.setRepeatListener(null);
}
@Override
public void onDestroy() {
super.onDestroy();
// Destroy the presenter only when the activity is actually finishing
// (the fragment instance is retained across configuration changes).
presenterDelegate.onDestroy(getActivity().isFinishing());
videoView.stopPlayback();
}
public TVNLVideoView getVideoView() {
return videoView;
}
public StreamInfo getStreamInfo() {
return streamInfo;
}
// Toggles the loading spinner; early-returns avoid redundant visibility churn.
public void setProgressVisible(boolean visible) {
if(progressIndicator.getVisibility() == View.VISIBLE && visible)
return;
if(progressIndicator.getVisibility() == View.GONE && !visible)
return;
progressIndicator.setVisibility(visible ? View.VISIBLE : View.GONE);
}
public void showPlayerInfo(String info) {
Toast.makeText(getActivity(), info, Toast.LENGTH_SHORT).show();
}
// Shows a retry/cancel dialog: positive retries loading, negative finishes.
public void showErrorDialog(String title, String message) {
if (!getActivity().isFinishing())
MessageDialogFragment.show(getFragmentManager(), title, message, false, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
getPresenter().loadMedia(VideoPlayerFragment.this);
}
}, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
getActivity().finish();
}
});
}
// Contract the host activity must implement to supply the stream to play.
public interface Callback {
StreamInfo getInfo();
}
/* Presenter section */
private static final String PRESENTER_STATE_KEY = "presenter_state";
private PresenterLifecycleDelegate<VideoPlayerFragPresenter> presenterDelegate =
new PresenterLifecycleDelegate<>(ReflectionPresenterFactory.<VideoPlayerFragPresenter>fromViewClass(getClass()));
@Override
public PresenterFactory<VideoPlayerFragPresenter> getPresenterFactory() {
return presenterDelegate.getPresenterFactory();
}
@Override
public void setPresenterFactory(PresenterFactory<VideoPlayerFragPresenter> presenterFactory) {
presenterDelegate.setPresenterFactory(presenterFactory);
}
@Override
public VideoPlayerFragPresenter getPresenter() {
return presenterDelegate.getPresenter();
}
/* Listener section */
OnPreparedListener onPreparedListener = new OnPreparedListener() {
@Override
public void onPrepared() {
getPresenter().isReady(VideoPlayerFragment.this, getVideoView());
}
};
OnCompletionListener onCompletionListener = new OnCompletionListener() {
@Override
public void onCompletion() {
getPresenter().isCompleted(getVideoView());
}
};
OnErrorListener onErrorListener = new OnErrorListener() {
@Override
public boolean onError() {
getPresenter().encounteredError(VideoPlayerFragment.this, getVideoView());
// Returning true marks the error as handled so ExoMedia does not surface it.
return true;
}
};
// Polls playback progress while the repeater runs (started/stopped in onResume/onPause).
Repeater.RepeatListener progressPollRepeatListener = new Repeater.RepeatListener() {
@Override
public void onRepeat() {
if(getVideoView().getDuration() > 0)
getPresenter().updateProgress(getVideoView().getCurrentPosition(), getVideoView().getDuration());
}
};
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.desktopvirtualization.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.desktopvirtualization.models.SessionHostHealthCheckReport;
import com.azure.resourcemanager.desktopvirtualization.models.Status;
import com.azure.resourcemanager.desktopvirtualization.models.UpdateState;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
import java.util.List;
/** Schema for SessionHost properties. */
@Fluent
public final class SessionHostProperties {
    // Kept for parity with the other generated fluent models; not referenced by
    // any method in this class.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(SessionHostProperties.class);

    /*
     * ObjectId of SessionHost. (internal use)
     */
    @JsonProperty(value = "objectId", access = JsonProperty.Access.WRITE_ONLY)
    private String objectId;

    /*
     * Last heart beat from SessionHost.
     */
    @JsonProperty(value = "lastHeartBeat")
    private OffsetDateTime lastHeartBeat;

    /*
     * Number of sessions on SessionHost.
     */
    @JsonProperty(value = "sessions")
    private Integer sessions;

    /*
     * Version of agent on SessionHost.
     */
    @JsonProperty(value = "agentVersion")
    private String agentVersion;

    /*
     * Allow a new session.
     */
    @JsonProperty(value = "allowNewSession")
    private Boolean allowNewSession;

    /*
     * Virtual Machine Id of SessionHost's underlying virtual machine.
     */
    @JsonProperty(value = "virtualMachineId", access = JsonProperty.Access.WRITE_ONLY)
    private String virtualMachineId;

    /*
     * Resource Id of SessionHost's underlying virtual machine.
     */
    @JsonProperty(value = "resourceId", access = JsonProperty.Access.WRITE_ONLY)
    private String resourceId;

    /*
     * User assigned to SessionHost.
     */
    @JsonProperty(value = "assignedUser")
    private String assignedUser;

    /*
     * Status for a SessionHost.
     */
    @JsonProperty(value = "status")
    private Status status;

    /*
     * The timestamp of the status.
     */
    @JsonProperty(value = "statusTimestamp", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime statusTimestamp;

    /*
     * The version of the OS on the session host.
     */
    @JsonProperty(value = "osVersion")
    private String osVersion;

    /*
     * The version of the side by side stack on the session host.
     */
    @JsonProperty(value = "sxSStackVersion")
    private String sxSStackVersion;

    /*
     * Update state of a SessionHost.
     */
    @JsonProperty(value = "updateState")
    private UpdateState updateState;

    /*
     * The timestamp of the last update.
     */
    @JsonProperty(value = "lastUpdateTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime lastUpdateTime;

    /*
     * The error message.
     */
    @JsonProperty(value = "updateErrorMessage")
    private String updateErrorMessage;

    /*
     * List of SessionHostHealthCheckReports
     */
    @JsonProperty(value = "sessionHostHealthCheckResults", access = JsonProperty.Access.WRITE_ONLY)
    private List<SessionHostHealthCheckReport> sessionHostHealthCheckResults;

    /**
     * Get the objectId property: ObjectId of SessionHost. (internal use).
     *
     * @return the objectId value.
     */
    public String objectId() {
        return this.objectId;
    }

    /**
     * Get the lastHeartBeat property: Last heart beat from SessionHost.
     *
     * @return the lastHeartBeat value.
     */
    public OffsetDateTime lastHeartBeat() {
        return this.lastHeartBeat;
    }

    /**
     * Set the lastHeartBeat property: Last heart beat from SessionHost.
     *
     * @param lastHeartBeat the lastHeartBeat value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withLastHeartBeat(OffsetDateTime lastHeartBeat) {
        this.lastHeartBeat = lastHeartBeat;
        return this;
    }

    /**
     * Get the sessions property: Number of sessions on SessionHost.
     *
     * @return the sessions value.
     */
    public Integer sessions() {
        return this.sessions;
    }

    /**
     * Set the sessions property: Number of sessions on SessionHost.
     *
     * @param sessions the sessions value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withSessions(Integer sessions) {
        this.sessions = sessions;
        return this;
    }

    /**
     * Get the agentVersion property: Version of agent on SessionHost.
     *
     * @return the agentVersion value.
     */
    public String agentVersion() {
        return this.agentVersion;
    }

    /**
     * Set the agentVersion property: Version of agent on SessionHost.
     *
     * @param agentVersion the agentVersion value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withAgentVersion(String agentVersion) {
        this.agentVersion = agentVersion;
        return this;
    }

    /**
     * Get the allowNewSession property: Allow a new session.
     *
     * @return the allowNewSession value.
     */
    public Boolean allowNewSession() {
        return this.allowNewSession;
    }

    /**
     * Set the allowNewSession property: Allow a new session.
     *
     * @param allowNewSession the allowNewSession value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withAllowNewSession(Boolean allowNewSession) {
        this.allowNewSession = allowNewSession;
        return this;
    }

    /**
     * Get the virtualMachineId property: Virtual Machine Id of SessionHost's underlying virtual machine.
     *
     * @return the virtualMachineId value.
     */
    public String virtualMachineId() {
        return this.virtualMachineId;
    }

    /**
     * Get the resourceId property: Resource Id of SessionHost's underlying virtual machine.
     *
     * @return the resourceId value.
     */
    public String resourceId() {
        return this.resourceId;
    }

    /**
     * Get the assignedUser property: User assigned to SessionHost.
     *
     * @return the assignedUser value.
     */
    public String assignedUser() {
        return this.assignedUser;
    }

    /**
     * Set the assignedUser property: User assigned to SessionHost.
     *
     * @param assignedUser the assignedUser value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withAssignedUser(String assignedUser) {
        this.assignedUser = assignedUser;
        return this;
    }

    /**
     * Get the status property: Status for a SessionHost.
     *
     * @return the status value.
     */
    public Status status() {
        return this.status;
    }

    /**
     * Set the status property: Status for a SessionHost.
     *
     * @param status the status value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withStatus(Status status) {
        this.status = status;
        return this;
    }

    /**
     * Get the statusTimestamp property: The timestamp of the status.
     *
     * @return the statusTimestamp value.
     */
    public OffsetDateTime statusTimestamp() {
        return this.statusTimestamp;
    }

    /**
     * Get the osVersion property: The version of the OS on the session host.
     *
     * @return the osVersion value.
     */
    public String osVersion() {
        return this.osVersion;
    }

    /**
     * Set the osVersion property: The version of the OS on the session host.
     *
     * @param osVersion the osVersion value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withOsVersion(String osVersion) {
        this.osVersion = osVersion;
        return this;
    }

    /**
     * Get the sxSStackVersion property: The version of the side by side stack on the session host.
     *
     * @return the sxSStackVersion value.
     */
    public String sxSStackVersion() {
        return this.sxSStackVersion;
    }

    /**
     * Set the sxSStackVersion property: The version of the side by side stack on the session host.
     *
     * @param sxSStackVersion the sxSStackVersion value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withSxSStackVersion(String sxSStackVersion) {
        this.sxSStackVersion = sxSStackVersion;
        return this;
    }

    /**
     * Get the updateState property: Update state of a SessionHost.
     *
     * @return the updateState value.
     */
    public UpdateState updateState() {
        return this.updateState;
    }

    /**
     * Set the updateState property: Update state of a SessionHost.
     *
     * @param updateState the updateState value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withUpdateState(UpdateState updateState) {
        this.updateState = updateState;
        return this;
    }

    /**
     * Get the lastUpdateTime property: The timestamp of the last update.
     *
     * @return the lastUpdateTime value.
     */
    public OffsetDateTime lastUpdateTime() {
        return this.lastUpdateTime;
    }

    /**
     * Get the updateErrorMessage property: The error message.
     *
     * @return the updateErrorMessage value.
     */
    public String updateErrorMessage() {
        return this.updateErrorMessage;
    }

    /**
     * Set the updateErrorMessage property: The error message.
     *
     * @param updateErrorMessage the updateErrorMessage value to set.
     * @return the SessionHostProperties object itself.
     */
    public SessionHostProperties withUpdateErrorMessage(String updateErrorMessage) {
        this.updateErrorMessage = updateErrorMessage;
        return this;
    }

    /**
     * Get the sessionHostHealthCheckResults property: List of SessionHostHealthCheckReports.
     *
     * @return the sessionHostHealthCheckResults value.
     */
    public List<SessionHostHealthCheckReport> sessionHostHealthCheckResults() {
        return this.sessionHostHealthCheckResults;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (sessionHostHealthCheckResults() != null) {
            // Method reference instead of the equivalent lambda e -> e.validate().
            sessionHostHealthCheckResults().forEach(SessionHostHealthCheckReport::validate);
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.TableStateManager;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState;
/**
 * State-machine procedure that disables a table:
 * prepare (precondition checks) -&gt; coprocessor pre-hook -&gt; set table state to
 * DISABLING -&gt; unassign all regions via child procedures -&gt; set table state to
 * DISABLED -&gt; coprocessor post-hook. Rollback is only supported for the first
 * two states; later states retry until they succeed.
 */
@InterfaceAudience.Private
public class DisableTableProcedure
extends AbstractStateMachineTableProcedure<DisableTableState> {
private static final Log LOG = LogFactory.getLog(DisableTableProcedure.class);
// Table to disable; restored from protobuf state on deserialization.
private TableName tableName;
// When true, prepareDisable() skips the ENABLED-state precondition check.
private boolean skipTableStateCheck;
// Lazily cached LOG.isTraceEnabled(); see isTraceEnabled().
private Boolean traceEnabled = null;
// Required by the procedure framework for deserialization.
public DisableTableProcedure() {
super();
}
/**
 * Constructor
 * @param env MasterProcedureEnv
 * @param tableName the table to operate on
 * @param skipTableStateCheck whether to check table state
 */
public DisableTableProcedure(final MasterProcedureEnv env, final TableName tableName,
final boolean skipTableStateCheck) {
this(env, tableName, skipTableStateCheck, null);
}
/**
 * Constructor
 * @param env MasterProcedureEnv
 * @param tableName the table to operate on
 * @param skipTableStateCheck whether to check table state
 * @param syncLatch latch released once the prepare step finishes, letting the
 *          caller wait for the precondition checks; may be null
 */
public DisableTableProcedure(final MasterProcedureEnv env, final TableName tableName,
final boolean skipTableStateCheck, final ProcedurePrepareLatch syncLatch) {
super(env, syncLatch);
this.tableName = tableName;
this.skipTableStateCheck = skipTableStateCheck;
}
@Override
protected Flow executeFromState(final MasterProcedureEnv env, final DisableTableState state)
throws InterruptedException {
if (isTraceEnabled()) {
LOG.trace(this + " execute state=" + state);
}
try {
switch (state) {
case DISABLE_TABLE_PREPARE:
if (prepareDisable(env)) {
setNextState(DisableTableState.DISABLE_TABLE_PRE_OPERATION);
} else {
// prepareDisable() already called setFailure() with the reason.
assert isFailed() : "disable should have an exception here";
return Flow.NO_MORE_STATE;
}
break;
case DISABLE_TABLE_PRE_OPERATION:
preDisable(env, state);
setNextState(DisableTableState.DISABLE_TABLE_SET_DISABLING_TABLE_STATE);
break;
case DISABLE_TABLE_SET_DISABLING_TABLE_STATE:
setTableStateToDisabling(env, tableName);
setNextState(DisableTableState.DISABLE_TABLE_MARK_REGIONS_OFFLINE);
break;
case DISABLE_TABLE_MARK_REGIONS_OFFLINE:
// Regions are taken offline by child unassign procedures; this procedure
// resumes at the next state once they all complete.
addChildProcedure(env.getAssignmentManager().createUnassignProcedures(tableName));
setNextState(DisableTableState.DISABLE_TABLE_SET_DISABLED_TABLE_STATE);
break;
case DISABLE_TABLE_SET_DISABLED_TABLE_STATE:
setTableStateToDisabled(env, tableName);
setNextState(DisableTableState.DISABLE_TABLE_POST_OPERATION);
break;
case DISABLE_TABLE_POST_OPERATION:
postDisable(env, state);
return Flow.NO_MORE_STATE;
default:
throw new UnsupportedOperationException("Unhandled state=" + state);
}
} catch (IOException e) {
// Before the point of no return we fail the procedure; afterwards the state
// is retried (by returning HAS_MORE_STATE without advancing).
if (isRollbackSupported(state)) {
setFailure("master-disable-table", e);
} else {
LOG.warn("Retriable error trying to disable table=" + tableName +
" (in state=" + state + ")", e);
}
}
return Flow.HAS_MORE_STATE;
}
@Override
protected void rollbackState(final MasterProcedureEnv env, final DisableTableState state)
throws IOException {
// nothing to rollback, prepare-disable is just table-state checks.
// We can fail if the table does not exist or is not disabled.
switch (state) {
case DISABLE_TABLE_PRE_OPERATION:
return;
case DISABLE_TABLE_PREPARE:
releaseSyncLatch();
return;
default:
break;
}
// The disable doesn't have a rollback past this point. The execution will
// succeed, at some point (states are retried, not rolled back).
throw new UnsupportedOperationException("Unhandled state=" + state);
}
@Override
protected boolean isRollbackSupported(final DisableTableState state) {
// Only the states before any persistent change can be rolled back.
switch (state) {
case DISABLE_TABLE_PREPARE:
case DISABLE_TABLE_PRE_OPERATION:
return true;
default:
return false;
}
}
@Override
protected DisableTableState getState(final int stateId) {
return DisableTableState.valueOf(stateId);
}
@Override
protected int getStateId(final DisableTableState state) {
return state.getNumber();
}
@Override
protected DisableTableState getInitialState() {
return DisableTableState.DISABLE_TABLE_PREPARE;
}
@Override
protected void serializeStateData(ProcedureStateSerializer serializer)
throws IOException {
// Persist user info, table name and the skip flag so the procedure can resume
// after a master restart; see deserializeStateData for the mirror image.
super.serializeStateData(serializer);
MasterProcedureProtos.DisableTableStateData.Builder disableTableMsg =
MasterProcedureProtos.DisableTableStateData.newBuilder()
.setUserInfo(MasterProcedureUtil.toProtoUserInfo(getUser()))
.setTableName(ProtobufUtil.toProtoTableName(tableName))
.setSkipTableStateCheck(skipTableStateCheck);
serializer.serialize(disableTableMsg.build());
}
@Override
protected void deserializeStateData(ProcedureStateSerializer serializer)
throws IOException {
super.deserializeStateData(serializer);
MasterProcedureProtos.DisableTableStateData disableTableMsg =
serializer.deserialize(MasterProcedureProtos.DisableTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(disableTableMsg.getUserInfo()));
tableName = ProtobufUtil.toTableName(disableTableMsg.getTableName());
skipTableStateCheck = disableTableMsg.getSkipTableStateCheck();
}
@Override
public TableName getTableName() {
return tableName;
}
@Override
public TableOperationType getTableOperationType() {
return TableOperationType.DISABLE;
}
/**
 * Action before any real action of disabling table. Set the exception in the procedure instead
 * of throwing it. This approach is to deal with backward compatible with 1.0.
 * @param env MasterProcedureEnv
 * @return true if the table can be disabled; false if a failure has been set on
 *         the procedure (catalog table, missing table, or not ENABLED)
 * @throws IOException if the existence or state check fails
 */
private boolean prepareDisable(final MasterProcedureEnv env) throws IOException {
boolean canTableBeDisabled = true;
if (tableName.equals(TableName.META_TABLE_NAME)) {
setFailure("master-disable-table", new ConstraintException("Cannot disable catalog table"));
canTableBeDisabled = false;
} else if (!MetaTableAccessor.tableExists(env.getMasterServices().getConnection(), tableName)) {
setFailure("master-disable-table", new TableNotFoundException(tableName));
canTableBeDisabled = false;
} else if (!skipTableStateCheck) {
// There could be multiple client requests trying to disable or enable
// the table at the same time. Ensure only the first request is honored
// After that, no other requests can be accepted until the table reaches
// DISABLED or ENABLED.
//
// Note: in 1.0 release, we called TableStateManager.setTableStateIfInStates() to set
// the state to DISABLING from ENABLED. The implementation was done before table lock
// was implemented. With table lock, there is no need to set the state here (it will
// set the state later on). A quick state check should be enough for us to move forward.
TableStateManager tsm = env.getMasterServices().getTableStateManager();
TableState.State state = tsm.getTableState(tableName);
if (!state.equals(TableState.State.ENABLED)){
LOG.info("Table " + tableName + " isn't enabled;is "+state.name()+"; skipping disable");
setFailure("master-disable-table", new TableNotEnabledException(
tableName+" state is "+state.name()));
canTableBeDisabled = false;
}
}
// We are done the check. Future actions in this procedure could be done asynchronously.
releaseSyncLatch();
return canTableBeDisabled;
}
/**
 * Action before disabling table.
 * @param env MasterProcedureEnv
 * @param state the procedure state
 * @throws IOException if the coprocessor pre-hook fails
 * @throws InterruptedException if interrupted while running the hook
 */
protected void preDisable(final MasterProcedureEnv env, final DisableTableState state)
throws IOException, InterruptedException {
runCoprocessorAction(env, state);
}
/**
 * Mark table state to Disabling
 * @param env MasterProcedureEnv
 * @throws IOException if updating the table state fails
 */
protected static void setTableStateToDisabling(
final MasterProcedureEnv env,
final TableName tableName) throws IOException {
// Set table disabling flag up in zk.
env.getMasterServices().getTableStateManager().setTableState(
tableName,
TableState.State.DISABLING);
}
/**
 * Mark table state to Disabled
 * @param env MasterProcedureEnv
 * @throws IOException if updating the table state fails
 */
protected static void setTableStateToDisabled(
final MasterProcedureEnv env,
final TableName tableName) throws IOException {
// Flip the table to disabled
env.getMasterServices().getTableStateManager().setTableState(
tableName,
TableState.State.DISABLED);
LOG.info("Disabled table, " + tableName + ", is completed.");
}
/**
 * Action after disabling table.
 * @param env MasterProcedureEnv
 * @param state the procedure state
 * @throws IOException if the coprocessor post-hook fails
 * @throws InterruptedException if interrupted while running the hook
 */
protected void postDisable(final MasterProcedureEnv env, final DisableTableState state)
throws IOException, InterruptedException {
runCoprocessorAction(env, state);
}
/**
 * The procedure could be restarted from a different machine. If the variable is null, we need to
 * retrieve it.
 * @return traceEnabled
 */
private Boolean isTraceEnabled() {
if (traceEnabled == null) {
traceEnabled = LOG.isTraceEnabled();
}
return traceEnabled;
}
/**
 * Coprocessor Action.
 * @param env MasterProcedureEnv
 * @param state the procedure state
 * @throws IOException if the coprocessor hook fails
 * @throws InterruptedException if interrupted while running the hook
 */
private void runCoprocessorAction(final MasterProcedureEnv env, final DisableTableState state)
throws IOException, InterruptedException {
final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
if (cpHost != null) {
switch (state) {
case DISABLE_TABLE_PRE_OPERATION:
cpHost.preDisableTableAction(tableName, getUser());
break;
case DISABLE_TABLE_POST_OPERATION:
cpHost.postCompletedDisableTableAction(tableName, getUser());
break;
default:
throw new UnsupportedOperationException(this + " unhandled state=" + state);
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets;
import static org.apache.geode.cache.Region.Entry;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.distributed.ConfigurationProperties.DURABLE_CLIENT_ID;
import static org.apache.geode.distributed.ConfigurationProperties.DURABLE_CLIENT_TIMEOUT;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.internal.AvailablePortHelper.getRandomAvailableTCPPort;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.Properties;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.client.Pool;
import org.apache.geode.cache.client.PoolManager;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.cache.ClientServerObserverAdapter;
import org.apache.geode.internal.cache.ClientServerObserverHolder;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
/**
* Tests that the response matrix defined in <code>ServerResponseMatrix</code> is applied correctly.
*
* @since GemFire 5.1
*/
@Category({ClientSubscriptionTest.class})
public class DurableResponseMatrixDUnitTest extends JUnit4DistributedTestCase {
// Client-side cache for this JVM; created in createCacheClient during setup.
protected static Cache cache = null;
// Server-side VM; assigned in postSetUp.
VM server1 = null;
// Port the cache server listens on; set once the server VM has started.
private static Integer PORT1;
private static final String REGION_NAME = "DurableResponseMatrixDUnitTest_region";
public static final String KEY = "KeyMatrix1";
@Override
public final void postSetUp() throws Exception {
    // Bring up the server VM first, then connect this JVM as the client.
    server1 = Host.getHost(0).getVM(0);
    PORT1 = ((Integer) server1.invoke(() -> DurableResponseMatrixDUnitTest.createServerCache()));
    createCacheClient(NetworkUtils.getServerHostName(server1.getHost()));
    // Client disconnects can surface these; don't let them fail the tests.
    IgnoredException.addIgnoredException("Connection reset||Unexpected IOException");
}
@Test
public void testRegisterInterestResponse_NonExistent_Invalid() throws Exception {
    // Server holds an invalidated entry, client has none: registration should
    // deliver the key with no value.
    server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntry(KEY));
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertFalse(region.containsValueForKey(KEY));
    assertEquals(null, region.getEntry(KEY).getValue());
}
@Test
public void testRegisterInterestResponse_NonExistent_Valid() throws Exception {
    // Entry exists only on the server: registration should pull its value here.
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals("ValueMatrix1", region.getEntry(KEY).getValue());
}
@Ignore("TODO: test is broken and disabled")
@Test
public void testRegisterInterestResponse_Valid_Invalid() throws Exception {
    // Client has a valid value, server invalidates: the local value should survive.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "ValueMatrix1");
    server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntry(KEY));
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals("ValueMatrix1", region.getEntry(KEY).getValue());
}
@Test
public void testRegisterInterestResponse_Valid_Valid() throws Exception {
    // Client already holds the value: registration must not disturb it.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "ValueMatrix1");
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals("ValueMatrix1", region.getEntry(KEY).getValue());
}
@Test
public void testRegisterInterestResponse_Invalid_Invalid() throws Exception {
    // Invalid on both sides: the entry must end up with no value after registration.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    invalidateEntry(KEY);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntry(KEY));
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals(null, region.getEntry(KEY).getValue());
}
@Ignore("TODO: test is broken and disabled")
@Test
public void testRegisterInterestResponse_Invalid_Valid() throws Exception {
    // Client invalid, server valid: registration should repopulate the value.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    invalidateEntry(KEY);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals("ValueMatrix1", region.getEntry(KEY).getValue());
}
@Test
public void testRegisterInterestResponse_Destroyed_Invalid() throws Exception {
    // Client destroyed the entry, server invalidated it: the client should end up
    // with an invalidated (valueless) entry after registration.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    region.destroy(KEY);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntry(KEY));
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertFalse(region.containsValueForKey(KEY));
    assertEquals(null, region.getEntry(KEY).getValue());
}
@Test
public void testRegisterInterestResponse_Destroyed_Valid() throws Exception {
    // Client destroyed the entry, server has a value: registration re-creates it.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    region.destroy(KEY);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals("ValueMatrix1", region.getEntry(KEY).getValue());
}
@Test
public void testRegisterInterest_Destroy_Concurrent() throws Exception {
    // Destroy the entry from inside the pre-registration callback to simulate a
    // destroy racing with register-interest; the entry must end up absent.
    PoolImpl.BEFORE_REGISTER_CALLBACK_FLAG = true;
    ClientServerObserverHolder.setInstance(new ClientServerObserverAdapter() {
        @Override
        public void beforeInterestRegistration() {
            Region region = cache.getRegion(SEPARATOR + REGION_NAME);
            region.put(KEY, "AgainDummyValue");
            region.destroy(KEY);
            PoolImpl.BEFORE_REGISTER_CALLBACK_FLAG = false;
        }
    });
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    region.registerInterest(KEY, InterestResultPolicy.KEYS_VALUES);
    assertEquals(null, region.getEntry(KEY));
}
/**
 * Polls the given region until the entry for {@code key} reaches the expected
 * state (via {@link GeodeAwaitility}).
 *
 * @param r        region to observe
 * @param key      key whose entry is awaited
 * @param expected expected value, or {@code null} to wait until the key carries
 *                 no value (invalidated or destroyed)
 */
private void waitForValue(final Region r, final Object key, final Object expected) {
    WaitCriterion ev = new WaitCriterion() {
        @Override
        public boolean done() {
            // BUG FIX: the original read r.getEntry(KEY), silently ignoring the
            // key parameter. All current callers pass KEY, so their behavior is
            // unchanged, but the method now honors its contract.
            Entry entry = r.getEntry(key);
            if (expected == null) {
                // Success once the key no longer carries a value.
                return !r.containsValueForKey(key);
            }
            return entry != null && expected.equals(entry.getValue());
        }

        @Override
        public String description() {
            // Previously returned null; a real message makes await timeouts readable.
            return "waiting for key " + key + " to reach value " + expected;
        }
    };
    GeodeAwaitility.await().untilAsserted(ev);
}
@Test
public void testNotification_NonExistent_Create() throws Exception {
    // Subscribe first, then create on the server: the create must be pushed here.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    waitForValue(region, KEY, "ValueMatrix1");
}
@Test
public void testNotification_NonExistent_Update() throws Exception {
    // Two server-side writes after subscribing: the client must see the latest.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix2"));
    waitForValue(region, KEY, "ValueMatrix2");
}
@Test
public void testNotification_NonExistent_Invalid() throws Exception {
    // Server invalidate after subscription: the client entry must lose its value.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntryOnly(KEY));
    waitForValue(region, KEY, null);
}
@Test
public void testNotification_NonExistent_Destroy() throws Exception {
    // Server destroy after subscription: the client entry must disappear.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.destroyEntry(KEY));
    waitForValue(region, KEY, null);
}
@Test
public void testNotification_Valid_Create() throws Exception {
    // Client holds a local value; a server create notification must overwrite it.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    waitForValue(region, KEY, "ValueMatrix1");
}
@Test
public void testNotification_Valid_Update() throws Exception {
    // Client holds a local value; a server update notification must replace it.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix2"));
    waitForValue(region, KEY, "ValueMatrix2");
}
@Ignore("TODO: test is broken and disabled")
@Test
public void testNotification_Valid_Invalid() throws Exception {
    // Server invalidate notification must clear the client's local value.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntryOnly(KEY));
    waitForValue(region, KEY, null);
}
@Test
public void testNotification_Valid_Destroy() throws Exception {
    // Server destroy notification must remove the client's local entry.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    region.put(KEY, "DummyValue");
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.destroyEntry(KEY));
    waitForValue(region, KEY, null);
}
@Test
public void testNotification_Invalid_Create() throws Exception {
    // Entry locally invalid; a server create notification must supply the value.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    invalidateEntry(KEY);
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    waitForValue(region, KEY, "ValueMatrix1");
}
@Test
public void testNotification_Invalid_Update() throws Exception {
    // Entry locally invalid; a server update notification must supply the value.
    final Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    invalidateEntry(KEY);
    region.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
    server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
    waitForValue(region, KEY, "ValueMatrix1");
}
// Matrix case: local entry INVALID, server invalidates again.
// Expect the durable client's entry to stay invalid (null value).
@Ignore("TODO: test is broken and disabled")
@Test
public void testNotification_Invalid_Invalid() throws Exception {
Region r = cache.getRegion(SEPARATOR + REGION_NAME);
invalidateEntry(KEY); // put then invalidate -> entry exists but is INVALID
r.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntryOnly(KEY));
waitForValue(r, KEY, null); // invalidate
}
// Matrix case: local entry INVALID, server destroys the entry.
// Expect the durable client's entry to disappear (null value).
@Test
public void testNotification_Invalid_Destroy() throws Exception {
Region r = cache.getRegion(SEPARATOR + REGION_NAME);
invalidateEntry(KEY); // put then invalidate -> entry exists but is INVALID
r.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
server1.invoke(() -> DurableResponseMatrixDUnitTest.destroyEntry(KEY));
waitForValue(r, KEY, null); // destroyed
}
// Matrix case: local entry LOCALLY invalidated, server creates/updates.
// Expect the durable client to receive the new value.
@Test
public void testNotification_LocalInvalid_Create() throws Exception {
Region r = cache.getRegion(SEPARATOR + REGION_NAME);
localInvalidateEntry(KEY); // put then local-invalidate (not distributed)
r.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
waitForValue(r, KEY, "ValueMatrix1");
}
// Matrix case: local entry LOCALLY invalidated, server updates.
// Expect the durable client to receive the updated value.
@Test
public void testNotification_LocalInvalid_Update() throws Exception {
Region r = cache.getRegion(SEPARATOR + REGION_NAME);
localInvalidateEntry(KEY); // put then local-invalidate (not distributed)
r.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
server1.invoke(() -> DurableResponseMatrixDUnitTest.updateEntry(KEY, "ValueMatrix1"));
waitForValue(r, KEY, "ValueMatrix1");
}
// Matrix case: local entry LOCALLY invalidated, server invalidates.
// Expect the durable client's entry to stay invalid (null value).
@Ignore("TODO: test is broken and disabled")
@Test
public void testNotification_LocalInvalid_Invalid() throws Exception {
Region r = cache.getRegion(SEPARATOR + REGION_NAME);
localInvalidateEntry(KEY); // put then local-invalidate (not distributed)
r.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
server1.invoke(() -> DurableResponseMatrixDUnitTest.invalidateEntryOnly(KEY));
waitForValue(r, KEY, null); // invalidate
}
// Matrix case: local entry LOCALLY invalidated, server destroys the entry.
// Expect the durable client's entry to disappear (null value).
@Test
public void testNotification_LocalInvalid_Destroy() throws Exception {
  Region r = cache.getRegion(SEPARATOR + REGION_NAME);
  // FIX: was invalidateEntry(KEY) (a distributed invalidate), which left the
  // entry in the INVALID state instead of the LOCAL_INVALID state this test's
  // name claims to exercise. Use localInvalidateEntry like the other
  // LocalInvalid_* tests so the matrix cell is actually covered.
  localInvalidateEntry(KEY);
  r.registerInterest("ALL_KEYS", InterestResultPolicy.NONE);
  server1.invoke(() -> DurableResponseMatrixDUnitTest.destroyEntry(KEY));
  waitForValue(r, KEY, null); // destroyed
}
/**
 * Puts {@code value} at {@code key} in the test region of this VM's cache.
 * Any failure is converted into a test failure rather than propagated.
 */
public static void updateEntry(String key, String value) throws Exception {
  try {
    Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    assertNotNull(region);
    region.put(key, value);
  } catch (Exception cause) {
    Assert.fail("test failed due to ", cause);
  }
}
/**
 * Destroys the entry at {@code key} in the test region of this VM's cache.
 * Any failure is converted into a test failure rather than propagated.
 */
public static void destroyEntry(String key) throws Exception {
  try {
    Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    assertNotNull(region);
    region.destroy(key);
  } catch (Exception cause) {
    Assert.fail("test failed due to ", cause);
  }
}
/**
 * Invalidates the entry at {@code key} without creating it first (the entry
 * must already exist). Any failure is converted into a test failure.
 */
public static void invalidateEntryOnly(String key) throws Exception {
  try {
    Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    assertNotNull(region);
    region.invalidate(key);
  } catch (Exception cause) {
    Assert.fail("test failed due to ", cause);
  }
}
/**
 * Creates the entry at {@code key} (dummy value) and then performs a
 * distributed invalidate, leaving the entry present but INVALID.
 * Any failure is converted into a test failure.
 */
public static void invalidateEntry(String key) throws Exception {
  try {
    Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    assertNotNull(region);
    region.put(key, "DummyValue");
    region.invalidate(key);
  } catch (Exception cause) {
    Assert.fail("test failed due to ", cause);
  }
}
/**
 * Creates the entry at {@code key} (dummy value) and then invalidates it only
 * in this VM, leaving the entry LOCAL_INVALID here but untouched elsewhere.
 * Any failure is converted into a test failure.
 */
public static void localInvalidateEntry(String key) throws Exception {
  try {
    Region region = cache.getRegion(SEPARATOR + REGION_NAME);
    assertNotNull(region);
    region.put(key, "DummyValue");
    region.localInvalidate(key);
  } catch (Exception cause) {
    Assert.fail("test failed due to ", cause);
  }
}
/**
 * (Re)creates this VM's cache using the supplied distributed-system properties.
 * NOTE(review): the connect/disconnect/reconnect sequence presumably discards
 * any stale DistributedSystem left over from a prior test so the new
 * properties actually take effect — confirm before simplifying.
 */
private void createCache(Properties props) {
try {
DistributedSystem ds = getSystem(props);
assertNotNull(ds);
ds.disconnect(); // drop any previously cached connection
ds = getSystem(props); // reconnect with the requested properties
cache = CacheFactory.create(ds);
assertNotNull(cache);
} catch (Exception e) {
Assert.fail("test failed due to ", e);
}
}
/**
 * Sets up this VM as a durable client of the server on {@code host}:PORT1:
 * creates a cache with durable-client properties, a subscription-enabled pool
 * with redundancy 1, a LOCAL region bound to that pool, and finally signals
 * readiness for queued events via readyForEvents().
 */
private void createCacheClient(String host) {
try {
final String durableClientId = "DurableResponseMatrixDUnitTest_client";
final int durableClientTimeout = 60; // keep the client alive for 60 s
Properties props =
getClientDistributedSystemProperties(durableClientId, durableClientTimeout);
new DurableResponseMatrixDUnitTest().createCache(props);
Pool p =
PoolManager.createFactory().addServer(host, PORT1.intValue()).setSubscriptionEnabled(true)
.setSubscriptionRedundancy(1).setReadTimeout(10000).setMinConnections(2)
// .setRetryInterval(2000)
.create("DurableResponseMatrixDUnitTestPool");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
factory.setPoolName(p.getName());
RegionAttributes attrs = factory.create();
Region r = cache.createRegion(REGION_NAME, attrs);
assertNotNull(r);
// Must be called after regions/interest are set up: tells the server this
// durable client is ready to receive its queued events.
cache.readyForEvents();
} catch (Exception e) {
Assert.fail("test failed due to ", e);
}
}
/**
 * Sets up this VM as a server: creates a cache, a replicated DISTRIBUTED_ACK
 * region, and a cache server on a random free port with subscriptions enabled.
 *
 * @return the port the cache server is listening on
 */
public static Integer createServerCache() throws Exception {
  Properties props = new Properties();
  new DurableResponseMatrixDUnitTest().createCache(props);
  AttributesFactory factory = new AttributesFactory();
  factory.setScope(Scope.DISTRIBUTED_ACK);
  factory.setDataPolicy(DataPolicy.REPLICATE);
  RegionAttributes attrs = factory.create();
  Region r = cache.createRegion(REGION_NAME, attrs);
  assertNotNull(r);
  // FIX: renamed local from "server1" — it shadowed the static VM field of the
  // same name used elsewhere in this test, which is confusing and error-prone.
  CacheServer server = cache.addCacheServer();
  int port = getRandomAvailableTCPPort();
  server.setPort(port);
  server.setNotifyBySubscription(true); // required for client subscriptions
  server.start();
  // FIX: Integer.valueOf instead of the deprecated new Integer(...) constructor.
  return Integer.valueOf(server.getPort());
}
/**
 * Builds loner (no mcast, no locators) distributed-system properties that mark
 * this member as a durable client with the given id and timeout in seconds.
 */
private Properties getClientDistributedSystemProperties(String durableClientId,
    int durableClientTimeout) {
  Properties config = new Properties();
  config.setProperty(MCAST_PORT, "0");
  config.setProperty(LOCATORS, "");
  config.setProperty(DURABLE_CLIENT_ID, durableClientId);
  config.setProperty(DURABLE_CLIENT_TIMEOUT, String.valueOf(durableClientTimeout));
  return config;
}
/**
 * Tears down in client-then-server order: the client cache in this VM is
 * closed first, then the server cache in the server VM.
 */
@Override
public final void preTearDown() throws Exception {
// close the clients first
closeCache();
// then close the servers
server1.invoke(() -> DurableResponseMatrixDUnitTest.closeCache());
}
/**
 * Closes this VM's cache and disconnects its distributed system.
 * A no-op when the cache was never created or is already closed.
 */
public static void closeCache() {
  if (cache == null || cache.isClosed()) {
    return; // nothing to clean up
  }
  cache.close();
  cache.getDistributedSystem().disconnect();
}
}
|
|
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.filter.convolve.noborder;
import boofcv.generate.AutoTypeImage;
import boofcv.generate.CodeGeneratorBase;
import java.io.FileNotFoundException;
/**
 * Code generator for {@code ConvolveImageStandard_SB}: straightforward,
 * unoptimized 1D and 2D convolution routines across single-band images, with
 * one family of methods emitted per input/output image-type pairing. The
 * generated source is written through the {@code out} stream inherited from
 * {@link CodeGeneratorBase}.
 *
 * @author Peter Abeles
 */
public class GenerateConvolveImageStandard_SB extends CodeGeneratorBase {
// Per type-pair state; printAllOps() populates all of these before each print* call.
String kernelType; // suffix of the Kernel1D_/Kernel2D_ class, e.g. "F32"
String inputType; // single-band image class name of the input, e.g. "GrayU8"
String outputType; // single-band image class name of the output
String kernelData; // primitive element type of the kernel's data array
String inputData; // primitive element type of the input image's data array
String outputData; // primitive element type of the output image's data array
String sumType; // primitive type used to accumulate kernel products
String typeCast; // cast applied when storing the accumulated sum into the output
String bitWise; // mask for reading unsigned pixel values (empty for signed/float)
String workType; // DogArray_* workspace type matching the sum type
boolean hasDivide; // true when the emitted methods take and apply a divisor
// Emits the preamble followed by one method family per supported type pairing,
// then the closing brace of the generated class.
@Override
public void generateCode() throws FileNotFoundException {
printPreamble();
printAllOps(AutoTypeImage.F32, AutoTypeImage.F32, false);
// printAllOps(AutoTypeImage.F32, AutoTypeImage.F32, false, true);
printAllOps(AutoTypeImage.F64, AutoTypeImage.F64, false);
printAllOps(AutoTypeImage.U8, AutoTypeImage.I16, false);
printAllOps(AutoTypeImage.U8, AutoTypeImage.S32, false);
// NOTE(review): U16 -> I8 (vertical-only, with divide) is asymmetric with the
// surrounding pairings — confirm this narrowing combination is intentional.
printAllOps(AutoTypeImage.U16, AutoTypeImage.I8, true, true);
printAllOps(AutoTypeImage.S16, AutoTypeImage.I16, false);
printAllOps(AutoTypeImage.U8, AutoTypeImage.I8, true);
// printAllOps(AutoTypeImage.U8,AutoTypeImage.I8,false, true);
printAllOps(AutoTypeImage.S16, AutoTypeImage.I16, true);
printAllOps(AutoTypeImage.U16, AutoTypeImage.I16, false);
printAllOps(AutoTypeImage.U16, AutoTypeImage.I16, true);
printAllOps(AutoTypeImage.S32, AutoTypeImage.I16, true, true);
printAllOps(AutoTypeImage.S32, AutoTypeImage.S32, false);
printAllOps(AutoTypeImage.S32, AutoTypeImage.S32, true);
out.println("}");
}
// Emits the generated file's imports, concurrency markers, class javadoc, and
// the opening of the generated class declaration.
private void printPreamble() {
autoSelectName();
out.print("import pabeles.concurrency.GrowArray;\n" +
"import boofcv.misc.BoofMiscOps;\n" +
"import boofcv.struct.convolve.*;\n" +
"import boofcv.struct.image.*;\n" +
"import org.ddogleg.struct.DogArray_I32;\n" +
"import org.jetbrains.annotations.Nullable;\n" +
"\n" +
"import javax.annotation.Generated;\n" +
"import java.util.Arrays;\n");
out.println();
out.print("//CONCURRENT_INLINE import boofcv.concurrency.BoofConcurrency;\n");
out.println();
out.println();
out.print("/**\n" +
" * <p>\n" +
" * Standard algorithms with no fancy optimization for convolving 1D and 2D kernels across an image.\n" +
" * </p>\n" +
generateDocString("Peter Abeles") +
"@SuppressWarnings({\"ForLoopReplaceableByForEach\",\"Duplicates\"})\n" +
"public class " + className + " {\n\n");
}
// Convenience overload: emits the full method family (horizontal, vertical, 2D).
private void printAllOps( AutoTypeImage input, AutoTypeImage output, boolean hasDivide ) {
printAllOps(input, output, hasDivide, false);
}
// Configures the per-type-pair state fields from the given image types, then
// emits either just the vertical method or the full horizontal/vertical/2D set.
private void printAllOps( AutoTypeImage input, AutoTypeImage output, boolean hasDivide,
boolean justVertical ) {
typeCast = output.getTypeCastFromSum();
kernelType = input.getKernelType();
inputType = input.getSingleBandName();
outputType = output.getSingleBandName();
kernelData = input.getKernelDataType();
inputData = input.getDataType();
outputData = output.getDataType();
sumType = input.getSumType();
bitWise = input.getBitWise();
workType = ("DogArray_" + input.getKernelType()).replace("S32", "I32");
this.hasDivide = hasDivide;
// prefix the mask with a space purely for formatting of the generated code
if (!bitWise.isEmpty())
bitWise = " " + bitWise;
if (justVertical) {
if (hasDivide)
printVertical_div();
else
printVertical();
} else {
printHorizontal();
if (hasDivide)
printVertical_div();
else
printVertical();
if (hasDivide)
printConvolve2D_div();
else
printConvolve2D();
}
}
// Emits horizontal(): 1D convolution along image rows, optionally dividing
// each rounded sum by a divisor.
private void printHorizontal() {
String paramDiv = hasDivide ? " , int divisor" : "";
String totalDiv = hasDivide ? "((total + halfDivisor)/divisor)" : "total";
out.print("\tpublic static void horizontal( Kernel1D_" + kernelType + " kernel, " +
inputType + " src, " + outputType + " dst" + paramDiv + " ) {\n" +
"\t\tfinal " + inputData + "[] dataSrc = src.data;\n" +
"\t\tfinal " + outputData + "[] dataDst = dst.data;\n" +
"\t\tfinal " + kernelData + "[] dataKer = kernel.data;\n" +
"\n" +
"\t\tfinal int offset = kernel.getOffset();\n" +
"\t\tfinal int kernelWidth = kernel.getWidth();\n");
if (hasDivide)
out.print("\t\tfinal int halfDivisor = divisor/2;\n");
out.print("\n" +
"\t\tfinal int width = src.getWidth();\n");
String body = "\t\t\tint indexDst = dst.startIndex + i*dst.stride + offset;\n" +
"\t\t\tint j = src.startIndex + i*src.stride;\n" +
"\t\t\tfinal int jEnd = j + width - (kernelWidth - 1);\n" +
"\n" +
"\t\t\tfor (; j < jEnd; j++) {\n" +
"\t\t\t\t" + sumType + " total = 0;\n" +
"\t\t\t\tint indexSrc = j;\n" +
"\t\t\t\tfor (int k = 0; k < kernelWidth; k++) {\n" +
"\t\t\t\t\ttotal += (dataSrc[indexSrc++]" + bitWise + ")*dataKer[k];\n" +
"\t\t\t\t}\n" +
"\t\t\t\tdataDst[indexDst++] = " + typeCast + totalDiv + ";\n" +
"\t\t\t}\n";
printParallel("i", "0", "src.height", body);
out.print("\t}\n\n");
}
// Emits vertical() with a divisor: accumulates a full row at a time into a
// workspace array, then divides and stores, minimizing cache misses.
private void printVertical_div() {
out.print("\tpublic static void vertical( Kernel1D_" + kernelType + " kernel,\n" +
"\t\t\t\t\t\t\t\t " + inputType + " src, " + outputType + " dst, int divisor, @Nullable GrowArray<DogArray_I32> workspaces ) {\n" +
"\t\tworkspaces = BoofMiscOps.checkDeclare(workspaces, DogArray_I32::new);\n" +
"\t\tfinal DogArray_I32 work = workspaces.grow(); //CONCURRENT_REMOVE_LINE\n" +
"\t\tfinal " + inputData + "[] dataSrc = src.data;\n" +
"\t\tfinal " + outputData + "[] dataDst = dst.data;\n" +
"\t\tfinal " + kernelData + "[] dataKer = kernel.data;\n" +
"\n" +
"\t\tfinal int offset = kernel.getOffset();\n" +
"\t\tfinal int kernelWidth = kernel.getWidth();\n" +
"\t\tfinal int halfDivisor = divisor/2;\n" +
"\t\tfinal double divisionHack = 1.0/divisor; // WTF integer division is slower than converting to a float??\n" +
"\n" +
"\t\tfinal int imgWidth = dst.getWidth();\n" +
"\t\tfinal int imgHeight = dst.getHeight();\n" +
"\t\tfinal int yEnd = imgHeight - (kernelWidth - offset - 1);\n");
String body = "";
body += "\t\t" + sumType + "[] totalRow = BoofMiscOps.checkDeclare(work, imgWidth, true);\n" +
"\t\tfor (int y = y0; y < y1; y++) {\n" +
"\t\t\tfor (int k = 0; k < kernelWidth; k++) {\n" +
"\t\t\t\tfinal int kernelValue = dataKer[k];\n" +
"\t\t\t\tint indexSrc = src.startIndex + (y - offset + k)*src.stride;\n" +
"\t\t\t\tfor (int i = 0; i < imgWidth; i++) {\n" +
"\t\t\t\t\ttotalRow[i] += ((dataSrc[indexSrc++]" + bitWise + ")*kernelValue);\n" +
"\t\t\t\t}\n" +
"\t\t\t}\n" +
"\n" +
"\t\t\tint indexDst = dst.startIndex + y*dst.stride;\n" +
"\t\t\tfor (int i = 0; i < imgWidth; i++) {\n" +
"\t\t\t\tdataDst[indexDst++] = (" + outputData + ")((totalRow[i] + halfDivisor)*divisionHack);\n" +
"\t\t\t}\n" +
"\t\t\tArrays.fill(totalRow,0,imgWidth,0);\n" +
"\t\t}\n";
printParallelBlock("y0", "y1", "offset", "yEnd", null, body);
out.print("\t}\n\n");
}
/**
 * If a divisor isn't needed then the image can be processed in a way which minimizes cache misses. It's assumed
 * the output image can sum up without overflowing. This would be an issue even if an int is used to sum.
 */
private void printVertical() {
out.print("\tpublic static void vertical( Kernel1D_" + kernelType + " kernel, " + inputType + " src, " + outputType + " dst ) {\n" +
"\t\tfinal " + inputData + "[] dataSrc = src.data;\n" +
"\t\tfinal " + outputData + "[] dataDst = dst.data;\n" +
"\t\tfinal " + kernelData + "[] dataKer = kernel.data;\n" +
"\n" +
"\t\tfinal int offset = kernel.getOffset();\n" +
"\t\tfinal int kernelWidth = kernel.getWidth();\n" +
"\n" +
"\t\tfinal int imgWidth = dst.getWidth();\n" +
"\t\tfinal int imgHeight = dst.getHeight();\n" +
"\t\tfinal int yEnd = imgHeight - (kernelWidth - offset - 1);\n");
String body = "";
body += "\t\t\tfinal int indexDstStart = dst.startIndex + y*dst.stride;\n" +
"\t\t\tArrays.fill(dataDst, indexDstStart, indexDstStart + imgWidth, (" + outputData + ")0);\n" +
"\n" +
"\t\t\tfor (int k = 0; k < kernelWidth; k++) {\n" +
"\t\t\t\tfinal int iStart = src.startIndex + (y - offset + k)*src.stride;\n" +
"\t\t\t\tfinal int iEnd = iStart + imgWidth;\n" +
"\t\t\t\tint indexDst = indexDstStart;\n" +
"\t\t\t\t" + kernelData + " kernelValue = dataKer[k];\n" +
"\t\t\t\tfor (int i = iStart; i < iEnd; i++) {\n" +
"\t\t\t\t\tdataDst[indexDst++] += " + typeCast + "((dataSrc[i]" + bitWise + ")*kernelValue);\n" +
"\t\t\t\t}\n" +
"\t\t\t}\n";
printParallel("y", "offset", "yEnd", body);
out.print("\t}\n\n");
}
// Emits convolve(): the naive 2D convolution (four nested loops), optionally
// dividing each rounded sum by a divisor.
private void printConvolve2D() {
String paramDiv = hasDivide ? ", int divisor" : "";
String totalDiv = hasDivide ? "((total+halfDivisor)/divisor)" : "total";
String performBound = "";
out.print("\tpublic static void convolve( Kernel2D_" + kernelType + " kernel, " + inputType + " src, " + outputType + " dest" + paramDiv + " ) {\n" +
"\t\tfinal " + kernelData + "[] dataKernel = kernel.data;\n" +
"\t\tfinal " + inputData + "[] dataSrc = src.data;\n" +
"\t\tfinal " + outputData + "[] dataDst = dest.data;\n" +
"\n" +
"\t\tfinal int width = src.getWidth();\n" +
"\t\tfinal int height = src.getHeight();\n");
if (hasDivide)
out.print("\t\tfinal int halfDivisor = divisor/2;\n");
out.print("\n" +
"\t\tint offsetL = kernel.offset;\n" +
"\t\tint offsetR = kernel.width - kernel.offset - 1;\n");
String body = "";
body += "\t\t\tint indexDst = dest.startIndex + y*dest.stride + offsetL;\n" +
"\t\t\tfor (int x = offsetL; x < width - offsetR; x++) {\n" +
"\t\t\t\t" + sumType + " total = 0;\n" +
"\t\t\t\tint indexKer = 0;\n" +
"\t\t\t\tfor (int ki = 0; ki < kernel.width; ki++) {\n" +
"\t\t\t\t\tint indexSrc = src.startIndex + (y + ki - offsetL)*src.stride + x - offsetL;\n" +
"\t\t\t\t\tfor (int kj = 0; kj < kernel.width; kj++) {\n" +
"\t\t\t\t\t\ttotal += (dataSrc[indexSrc + kj]" + bitWise + ")*dataKernel[indexKer++];\n" +
"\t\t\t\t\t}\n" +
"\t\t\t\t}\n" +
performBound +
"\t\t\t\tdataDst[indexDst++] = " + typeCast + totalDiv + ";\n" +
"\t\t\t}\n";
printParallel("y", "offsetL", "height - offsetR", body);
out.print("\t}\n\n");
}
// Emits convolve() with a divisor: row-at-a-time accumulation into a workspace
// so the division happens once per output pixel at the end of each row band.
private void printConvolve2D_div() {
out.print("\tpublic static void convolve( Kernel2D_" + kernelType + " kernel, " + inputType + " src, " +
outputType + " dest, int divisor, @Nullable GrowArray<" + workType + "> workspaces ) {\n" +
"\t\tworkspaces = BoofMiscOps.checkDeclare(workspaces, " + workType + "::new);\n" +
"\t\tfinal " + workType + " work = workspaces.grow(); //CONCURRENT_REMOVE_LINE\n" +
"\t\tfinal " + kernelData + "[] dataKernel = kernel.data;\n" +
"\t\tfinal " + inputData + "[] dataSrc = src.data;\n" +
"\t\tfinal " + outputData + "[] dataDst = dest.data;\n" +
"\n" +
"\t\tfinal int width = src.getWidth();\n" +
"\t\tfinal int height = src.getHeight();\n" +
"\t\tfinal int halfDivisor = divisor/2;\n" +
"\n" +
"\t\tint offsetL = kernel.offset;\n" +
"\t\tint offsetR = kernel.width - kernel.offset - 1;\n" +
"\n" +
"\t\t//CONCURRENT_BELOW BoofConcurrency.loopBlocks(offsetL, height - offsetR,kernel.width, workspaces, (work,y0,y1) -> {\n" +
"\t\tfinal int y0 = offsetL, y1 = height - offsetR;\n" +
"\t\t" + sumType + " totalRow[] = BoofMiscOps.checkDeclare(work, src.width, false);\n" +
"\t\tfor (int y = y0; y < y1; y++) {\n" +
"\t\t\tint indexSrcRow = src.startIndex + (y - offsetL)*src.stride - offsetL;\n" +
"\t\t\tfor (int x = offsetL; x < width - offsetR; x++) {\n" +
"\t\t\t\tint indexSrc = indexSrcRow + x;\n" +
"\n" +
"\t\t\t\t" + sumType + " total = 0;\n" +
"\t\t\t\tfor (int k = 0; k < kernel.width; k++) {\n" +
"\t\t\t\t\ttotal += (dataSrc[indexSrc++]" + bitWise + ")*dataKernel[k];\n" +
"\t\t\t\t}\n" +
"\t\t\t\ttotalRow[x] = total;\n" +
"\t\t\t}\n" +
"\n" +
"\t\t\t// rest of the convolution rows are an addition\n" +
"\t\t\tfor (int i = 1; i < kernel.width; i++) {\n" +
"\t\t\t\tindexSrcRow = src.startIndex + (y + i - offsetL)*src.stride - offsetL;\n" +
"\t\t\t\tint indexKer = i*kernel.width;\n" +
"\n" +
"\t\t\t\tfor (int x = offsetL; x < width - offsetR; x++) {\n" +
"\t\t\t\t\tint indexSrc = indexSrcRow + x;\n" +
"\n" +
"\t\t\t\t\t" + sumType + " total = 0;\n" +
"\t\t\t\t\tfor (int k = 0; k < kernel.width; k++) {\n" +
"\t\t\t\t\t\ttotal += (dataSrc[indexSrc++]" + bitWise + ")*dataKernel[indexKer + k];\n" +
"\t\t\t\t\t}\n" +
"\n" +
"\t\t\t\t\ttotalRow[x] += total;\n" +
"\t\t\t\t}\n" +
"\t\t\t}\n" +
"\t\t\tint indexDst = dest.startIndex + y*dest.stride + offsetL;\n" +
"\t\t\tfor (int x = offsetL; x < width - offsetR; x++) {\n" +
"\t\t\t\tdataDst[indexDst++] = " + typeCast + "((totalRow[x] + halfDivisor)/divisor);\n" +
"\t\t\t}\n" +
"\t\t}\n" +
"\t\t//CONCURRENT_INLINE });\n" +
"\t}\n");
}
// Command-line entry point: generates the file for the boofcv-ip module.
public static void main( String[] args ) throws FileNotFoundException {
var gen = new GenerateConvolveImageStandard_SB();
gen.setModuleName("boofcv-ip");
gen.parseArguments(args);
gen.generateCode();
}
}
|
|
package com.app.herydevelopments.blocks;
import android.content.Context;
import android.graphics.Paint;
import android.support.v4.content.res.ResourcesCompat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Decodes a level definition string into boards of {@code Block} /
 * {@code BlockColor} objects.
 */
public class DataWorlds {
    /*
     * Block codes used in mInformation (comma separated):
     *
     *  0 : Empty Block
     * -1 : Wall Block
     *  1 : Blue Block           2 : Blue Objective Block
     *  3 : Green Block          4 : Green Objective Block
     *  5 : Purple Block         6 : Purple Objective Block
     *  7 : Red Block            8 : Red Objective Block
     *  9 : Yellow Block        10 : Yellow Objective Block
     * 11 : Orange Block        12 : Orange Objective Block
     */

    /** Color name for code pair (code - 1) / 2; odd codes are plain blocks, even are objectives. */
    private static final String[] COLOR_BY_CODE_PAIR =
            {"Blue", "Green", "Purple", "Red", "Yellow", "Orange"};

    private final Context mContext;
    private final String mInformation; // comma-separated block codes; may be ""

    public DataWorlds(Context context, String information) {
        this.mContext = context;
        this.mInformation = information;
    }

    /**
     * Builds the board of Block objects for the given side length. Only 4, 6
     * and 8 are supported; any other length yields an empty 0x0 board (the
     * original silent-failure behavior). Cells beyond the encoded information
     * keep their default state.
     */
    public Block[][] LevelManager(int boardLength) {
        List<String> items = splitInformation();
        Block[][] result;
        if (boardLength == 4 || boardLength == 6 || boardLength == 8) {
            result = new Block[boardLength][boardLength];
        } else {
            result = new Block[0][0]; // unsupported size: nothing to fill
        }
        int index = 0;
        for (int row = 0; row < result.length; row++) {
            for (int col = 0; col < result[row].length; col++) {
                Block block = new Block(mContext);
                if (index < items.size()) {
                    InitializeBlock(block, Integer.parseInt(items.get(index)));
                }
                result[row][col] = block;
                index++;
            }
        }
        return result;
    }

    /** Splits mInformation on commas; an empty string yields an empty list. */
    private List<String> splitInformation() {
        if (mInformation.equals("")) {
            return new ArrayList<>();
        }
        return Arrays.asList(mInformation.split(","));
    }

    /**
     * Marks the block as an objective (target) cell of the given color: it uses
     * the "fin" artwork, records both drawables, and stays logically empty.
     * Unknown color names leave the block untouched.
     * FIX: the original was missing {@code return} after the Purple, Yellow and
     * Orange branches; harmless only because the equals() checks are mutually
     * exclusive. The switch below removes the fragile fall-through entirely.
     */
    private void SetObjectiveBlockColor(Block block, String color) {
        int fin;
        int normal;
        switch (color) {
            case "Green":
                fin = R.drawable.block_fin_color_green;
                normal = R.drawable.block_color_green;
                break;
            case "Blue":
                fin = R.drawable.block_fin_color_blue;
                normal = R.drawable.block_color_blue;
                break;
            case "Red":
                fin = R.drawable.block_fin_color_red;
                normal = R.drawable.block_color_red;
                break;
            case "Purple":
                fin = R.drawable.block_fin_color_purple;
                normal = R.drawable.block_color_purple;
                break;
            case "Yellow":
                fin = R.drawable.block_fin_color_yellow;
                normal = R.drawable.block_color_yellow;
                break;
            case "Orange":
                fin = R.drawable.block_fin_color_orange;
                normal = R.drawable.block_color_orange;
                break;
            default:
                return; // unknown color: leave block untouched
        }
        block.setColor(fin);
        block.setColorObjetivo(normal);
        block.setColorFin(fin);
        block.setBlockFin(true);
        block.setIsEmpty(true);
    }

    /**
     * Colors a regular (occupied) block; "Wall" additionally makes it
     * immovable. Unknown color names leave the block untouched.
     */
    private void SetBlockColor(Block block, String color) {
        switch (color) {
            case "Green":
                block.setColor(R.drawable.block_color_green);
                break;
            case "Blue":
                block.setColor(R.drawable.block_color_blue);
                break;
            case "Red":
                block.setColor(R.drawable.block_color_red);
                break;
            case "Purple":
                block.setColor(R.drawable.block_color_purple);
                break;
            case "Yellow":
                block.setColor(R.drawable.block_color_yellow);
                break;
            case "Orange":
                block.setColor(R.drawable.block_color_orange);
                break;
            case "Wall":
                block.setColor(R.drawable.block_color_wall);
                block.setCanMove(false);
                break;
            default:
                return; // unknown color: leave block untouched
        }
        block.setIsEmpty(false);
    }

    /**
     * Configures a block from its numeric code (see the table at the top of
     * the class). Codes outside [-1, 12] leave the block untouched, matching
     * the original switch (which had no default branch).
     */
    private void InitializeBlock(Block block, int indicator) {
        if (indicator < -1 || indicator > 12) {
            return; // unknown code: default (empty) block
        }
        block.setFormIndicator(indicator);
        if (indicator == -1) {
            SetBlockColor(block, "Wall");
        } else if (indicator >= 1) {
            String color = COLOR_BY_CODE_PAIR[(indicator - 1) / 2];
            if (indicator % 2 == 1) {
                SetBlockColor(block, color); // odd code: plain colored block
            } else {
                SetObjectiveBlockColor(block, color); // even code: objective block
            }
        }
        // indicator == 0: empty block, only the form indicator is recorded
    }

    /**
     * Builds the preview board of BlockColor objects. The side length is
     * inferred from the number of encoded items (16, 36 or 64); any other
     * count yields an empty 0x0 board, as before.
     */
    public BlockColor[][] LevelManagerColors() {
        List<String> items = Arrays.asList(mInformation.split(","));
        int side;
        if (items.size() == 16) {
            side = 4;
        } else if (items.size() == 36) {
            side = 6;
        } else if (items.size() == 64) {
            side = 8;
        } else {
            side = 0; // unsupported item count: nothing to fill
        }
        BlockColor[][] result = new BlockColor[side][side];
        int index = 0;
        for (int row = 0; row < result.length; row++) {
            for (int col = 0; col < result[row].length; col++) {
                BlockColor blockColor = new BlockColor();
                InitializeColorBlock(blockColor, Integer.parseInt(items.get(index)));
                result[row][col] = blockColor;
                index++;
            }
        }
        return result;
    }

    /**
     * Configures a BlockColor from its numeric code: walls are square grey,
     * plain blocks are filled, objective blocks (even codes >= 2) are drawn as
     * outlines. Unknown codes keep the default rounded shape with no color.
     */
    private void InitializeColorBlock(BlockColor blockColor, int indicator) {
        blockColor.setRadio(6); // default rounded corners
        int colorRes;
        switch (indicator) {
            case -1:
                colorRes = R.color.colorGreyWall;
                break;
            case 0:
                colorRes = R.color.colorGrey;
                break;
            case 1:
            case 2:
                colorRes = R.color.colorBlue;
                break;
            case 3:
            case 4:
                colorRes = R.color.colorGreen;
                break;
            case 5:
            case 6:
                colorRes = R.color.colorPurple;
                break;
            case 7:
            case 8:
                colorRes = R.color.colorRed;
                break;
            case 9:
            case 10:
                colorRes = R.color.colorYellow;
                break;
            case 11:
            case 12:
                colorRes = R.color.colorOrange;
                break;
            default:
                return; // unknown code: leave defaults
        }
        blockColor.setColor(ResourcesCompat.getColor(mContext.getResources(), colorRes, null));
        // Objective blocks (even codes >= 2) are outlines; everything else is filled.
        blockColor.setStyle(indicator >= 2 && indicator % 2 == 0 ? Paint.Style.STROKE : Paint.Style.FILL);
        if (indicator == -1) {
            blockColor.setRadio(0); // walls are drawn square
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.lang.reflect;
import com.android.dex.Dex;
import java.lang.annotation.Annotation;
import java.util.Comparator;
import java.util.List;
import libcore.reflect.AnnotationAccess;
import libcore.reflect.GenericSignatureParser;
import libcore.reflect.Types;
/**
* This class represents a field. Information about the field can be accessed,
* and the field's value can be accessed dynamically.
*/
public final class Field extends AccessibleObject implements Member {
/**
 * Orders fields by name, breaking ties by the name of the declaring class.
 *
 * @hide
 */
public static final Comparator<Field> ORDER_BY_NAME_AND_DECLARING_CLASS
        = new Comparator<Field>() {
    @Override public int compare(Field a, Field b) {
        if (a == b) {
            return 0; // identical object: trivially equal
        }
        int byName = a.getName().compareTo(b.getName());
        if (byName != 0) {
            return byName;
        }
        // Same field name: fall back to comparing declaring-class names.
        Class<?> classA = a.getDeclaringClass();
        Class<?> classB = b.getDeclaringClass();
        return classA == classB ? 0 : classA.getName().compareTo(classB.getName());
    }
};
// The ART runtime representation this Field delegates all queries to; never null.
private final ArtField artField;
/**
 * Wraps the given runtime field representation.
 *
 * @throws NullPointerException if {@code artField} is null
 * @hide
 */
public Field(ArtField artField) {
if (artField == null) {
throw new NullPointerException("artField == null");
}
this.artField = artField;
}
/**
 * Returns the Java language modifiers for this field, decodable with
 * {@link Modifier}.
 *
 * @return the modifiers for this field
 * @see Modifier
 */
@Override public int getModifiers() {
    // The runtime access flags carry VM-internal bits above 0xffff; strip
    // them so only the standard Java modifier bits remain.
    int accessFlags = this.artField.getAccessFlags();
    return accessFlags & 0xffff;
}
/**
 * Returns {@code true} if this field is an enumeration constant,
 * {@code false} otherwise.
 */
public boolean isEnumConstant() {
    int flags = this.artField.getAccessFlags();
    return (flags & Modifier.ENUM) != 0;
}
/**
 * Returns {@code true} if this field was synthesized by the compiler,
 * {@code false} otherwise.
 */
@Override public boolean isSynthetic() {
    int flags = this.artField.getAccessFlags();
    return (flags & Modifier.SYNTHETIC) != 0;
}
/**
 * Returns the name of this field, as reported by the runtime.
 */
@Override public String getName() {
    return this.artField.getName();
}
/** Returns the class that declares this field, as reported by the runtime. */
@Override public Class<?> getDeclaringClass() {
    return this.artField.getDeclaringClass();
}
/**
 * Returns the declared (raw) {@link Class} type of this field.
 */
public Class<?> getType() {
    return this.artField.getType();
}
/**
 * Returns the index of this field's ID in its dex file, delegated directly
 * to the runtime field.
 *
 * @hide
 */
public int getDexFieldIndex() {
return artField.getDexFieldIndex();
}
/**
 * Returns the offset of the field within an instance, or for static fields,
 * the class, delegated directly to the runtime field.
 *
 * @hide
 */
public int getOffset() {
return artField.getOffset();
}
/**
 * {@inheritDoc}
 *
 * <p>Equivalent to {@code getDeclaringClass().getName().hashCode() ^ getName().hashCode()}.
 */
@Override public int hashCode() {
    String declaringClassName = getDeclaringClass().getName();
    return declaringClassName.hashCode() ^ getName().hashCode();
}
/**
 * Returns true if {@code other} wraps the same runtime field as this one
 * (each member has exactly one ArtField instance in this runtime, so
 * identity comparison suffices).
 */
@Override public boolean equals(Object other) {
    return (other instanceof Field) && this.artField == ((Field) other).artField;
}
/**
 * Returns the string representation of this field, including its modifiers,
 * generic type, and fully qualified name.
 *
 * @return the string representation of this field
 */
public String toGenericString() {
    StringBuilder result = new StringBuilder(80);
    // Modifiers first, when present.
    int mods = getModifiers();
    if (mods != 0) {
        result.append(Modifier.toString(mods)).append(' ');
    }
    // Then the generic type, a space, and the fully qualified field name.
    Types.appendGenericType(result, getGenericType());
    result.append(' ')
            .append(getDeclaringClass().getName())
            .append('.')
            .append(getName());
    return result.toString();
}
/**
 * Returns the generic type of this field, resolved from the field's generic
 * signature attribute when present; otherwise the raw type is returned.
 *
 * @return the generic type
 * @throws GenericSignatureFormatError
 *             if the generic field signature is invalid
 * @throws TypeNotPresentException
 *             if the generic type points to a missing type
 * @throws MalformedParameterizedTypeException
 *             if the generic type points to a type that cannot be
 *             instantiated for some reason
 */
public Type getGenericType() {
    final Class<?> declarer = getDeclaringClass();
    final GenericSignatureParser parser =
            new GenericSignatureParser(declarer.getClassLoader());
    parser.parseForField(declarer, AnnotationAccess.getSignature(this));
    final Type resolved = parser.fieldType;
    // No generic signature attribute: fall back to the raw field type.
    return (resolved != null) ? resolved : getType();
}
/**
 * Returns this field's type signature in non-printable (VM) form. This is called
 * (only) from IO native code and needed for deriving the serialVersionUID
 * of the class.
 */
@SuppressWarnings("unused")
private String getSignature() {
return Types.getSignature(getType());
}
/**
 * Returns the annotations declared directly on this field.
 *
 * @return a freshly allocated, possibly empty array of declared annotations
 */
@Override public Annotation[] getDeclaredAnnotations() {
    final List<Annotation> annotations = AnnotationAccess.getDeclaredAnnotations(this);
    final Annotation[] out = new Annotation[annotations.size()];
    return annotations.toArray(out);
}
/**
 * Returns this field's annotation of the given type, or {@code null} if none
 * is declared.
 *
 * @throws NullPointerException if {@code annotationType} is null
 */
@Override public <A extends Annotation> A getAnnotation(Class<A> annotationType) {
    if (annotationType != null) {
        return AnnotationAccess.getDeclaredAnnotation(this, annotationType);
    }
    throw new NullPointerException("annotationType == null");
}
/**
 * Tests whether an annotation of the given type is declared on this field.
 *
 * @throws NullPointerException if {@code annotationType} is null
 */
@Override public boolean isAnnotationPresent(Class<? extends Annotation> annotationType) {
    if (annotationType != null) {
        return AnnotationAccess.isDeclaredAnnotationPresent(this, annotationType);
    }
    throw new NullPointerException("annotationType == null");
}
// Reflective getters, implemented natively by the runtime. The typed variants
// share the access rules of get(Object); their javadoc cross-references it
// rather than repeating the same text nine times.
/**
 * Returns the value of the field in the specified object, reproducing the
 * effect of {@code object.fieldName}. If the type of this field is a
 * primitive type, the field value is automatically boxed. If this field is
 * static, the object argument is ignored; otherwise the object must be a
 * non-null instance of the declaring class. If this Field object is enforcing
 * access control (see AccessibleObject) and this field is not accessible from
 * the current context, an IllegalAccessException is thrown.
 *
 * @param object the object to access
 * @return the field value, possibly boxed
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native Object get(Object object) throws IllegalAccessException, IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code boolean}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native boolean getBoolean(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code byte}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native byte getByte(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code char}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native char getChar(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code double}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native double getDouble(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code float}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native float getFloat(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as an {@code int}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native int getInt(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code long}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native long getLong(Object object) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns the value of the field in the specified object as a {@code short}.
 * Access rules and failure modes are identical to {@link #get}.
 *
 * @param object the object to access
 * @return the field value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native short getShort(Object object) throws IllegalAccessException,
IllegalArgumentException;
// Reflective setters, implemented natively by the runtime. The typed variants
// share the access rules of set(Object, Object); their javadoc cross-references
// it rather than repeating the same text nine times.
/**
 * Sets the value of the field in the specified object, reproducing the effect
 * of {@code object.fieldName = value}. If the field type is a primitive type,
 * the value is automatically unboxed; if the unboxing fails, or if the value
 * cannot be converted to the field type via a widening conversion, an
 * IllegalArgumentException is thrown. If this field is static, the object
 * argument is ignored; otherwise the object must be a non-null instance of
 * the declaring class. If this Field object is enforcing access control (see
 * AccessibleObject) and this field is not accessible from the current
 * context, an IllegalAccessException is thrown.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void set(Object object, Object value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code boolean}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setBoolean(Object object, boolean value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code byte}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setByte(Object object, byte value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code char}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setChar(Object object, char value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code double}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setDouble(Object object, double value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code float}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setFloat(Object object, float value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code int}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setInt(Object object, int value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code long}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setLong(Object object, long value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Sets the value of the field in the specified object to the {@code short}
 * value. Access rules and failure modes are identical to {@link #set}.
 *
 * @param object the object to access
 * @param value the new value
 * @throws NullPointerException if the object is {@code null} and the field is non-static
 * @throws IllegalArgumentException if the object is not compatible with the declaring class
 * @throws IllegalAccessException if this field is not accessible
 */
public native void setShort(Object object, short value) throws IllegalAccessException,
IllegalArgumentException;
/**
 * Returns a concise, human-readable description of this field, in the form:
 * modifiers (if any), type, declaring class name, '.', field name.
 * <p>
 * For example: {@code public static java.io.InputStream java.lang.System.in}
 *
 * @return a printable representation for this field
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder(Modifier.toString(getModifiers()));
    // Only add a separating space when there were modifiers to print.
    if (sb.length() > 0) {
        sb.append(' ');
    }
    Types.appendTypeName(sb, getType());
    sb.append(' ')
            .append(getDeclaringClass().getName())
            .append('.')
            .append(getName());
    return sb.toString();
}
}
|
|
/*
* Copyright 2016 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.data.store.impl.fs;
import stroom.util.io.FileUtil;
import stroom.util.io.StreamUtil;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Round-trip tests for {@code BlockGZIPOutputFile} / {@code BlockGZIPInputFile}:
 * writes sequential data with a variety of block sizes, buffer sizes and file
 * lengths, reads it back, and exercises mark/reset/skip/seek behaviour.
 *
 * <p>Fixes over the previous revision: all streams are opened in
 * try-with-resources so a failing assertion cannot leak a file lock (which
 * {@code testBig} explicitly asserts against), {@code skip} return values are
 * checked, and the final AssertJ assertions use the conventional
 * {@code assertThat(actual).isEqualTo(expected)} argument order so failure
 * messages read correctly.
 */
class TestBlockGZIPFiles {
    @TempDir
    Path tempDir;

    @Test
    void testSimpleSmallDataInBigBlock() throws IOException {
        testWriteAndRead(10000, 99);
    }

    @Test
    void testSimpleDataInLotsOfSmallBlocks() throws IOException {
        testWriteAndRead(100, 999);
    }

    @Test
    void testSimpleBounds() throws IOException {
        // Sizes straddling the 10-byte block boundaries.
        testWriteAndRead(10, 0);
        testWriteAndRead(10, 1);
        testWriteAndRead(10, 9);
        testWriteAndRead(10, 10);
        testWriteAndRead(10, 11);
        testWriteAndRead(10, 19);
        testWriteAndRead(10, 20);
        testWriteAndRead(10, 21);
    }

    @Test
    void testBroken() throws IOException {
        for (int inBuf = 2; inBuf < 5; inBuf++) {
            for (int outBuf = 2; outBuf < 5; outBuf++) {
                testWriteAndReadBuffered(9, 100, inBuf, outBuf);
                testWriteAndReadBuffered(10, 100, inBuf, outBuf);
                testWriteAndReadBuffered(11, 100, inBuf, outBuf);
            }
        }
    }

    @Test
    void testBufferedSmall() throws IOException {
        testWriteAndReadBuffered(10, 30, 3, 3);
        testWriteAndReadBuffered(10, 29, 3, 3);
        testWriteAndReadBuffered(10, 31, 3, 3);
    }

    @Test
    void testBufferedBig() throws IOException {
        testWriteAndReadBuffered(1000, 1000000, 100, 100);
    }

    @Test
    void testBig() throws IOException {
        final Path testFile = tempDir.resolve("testBig.bgz");
        FileUtil.deleteFile(testFile);
        // try-with-resources: the file must be closed (and unlocked) before the
        // delete assertions below, even if a write fails.
        try (OutputStream os = new BufferedOutputStream(new BlockGZIPOutputFile(testFile, 1000000))) {
            for (int i = 0; i < 10000; i++) {
                os.write("some data that may compress quite well TEST\n".getBytes(StreamUtil.DEFAULT_CHARSET));
                os.write(("some other information TEST\n" + i).getBytes(StreamUtil.DEFAULT_CHARSET));
                os.write("concurrent testing TEST\n".getBytes(StreamUtil.DEFAULT_CHARSET));
                os.write("TEST TEST TEST\n".getBytes(StreamUtil.DEFAULT_CHARSET));
                os.write("JAMES BETTY TEST\n".getBytes(StreamUtil.DEFAULT_CHARSET));
                os.write("FRED TEST\n".getBytes(StreamUtil.DEFAULT_CHARSET));
                os.write("<XML> TEST\n".getBytes(StreamUtil.DEFAULT_CHARSET));
            }
        }
        try (InputStream is = new BlockGZIPInputFile(testFile)) {
            is.mark(-1);
            assertThat(is.skip(1)).isEqualTo(1);
            is.read();
            is.reset();
            final byte[] testBuf = new byte[10000];
            while ((is.read(testBuf)) != -1) {
                // Ignore
            }
        }
        assertThat(FileUtil.delete(testFile))
                .withFailMessage("Should not have any locks on file")
                .isTrue();
        assertThat(Files.isRegularFile(testFile))
                .withFailMessage("file deleted")
                .isFalse();
    }

    /**
     * Writes {@code fileSize} sequential byte values using the given block size
     * and verifies they read back in order.
     */
    private void testWriteAndRead(final int blockSize, final int fileSize) throws IOException {
        final Path file = Files.createTempFile(tempDir, "test", ".bgz");
        FileUtil.deleteFile(file);
        // Stupid Block Size For Testing
        try (BlockGZIPOutputFile outStream = new BlockGZIPOutputFile(file, blockSize)) {
            for (int i = 0; i < fileSize; i++) {
                outStream.write((byte) i);
            }
        }
        byte expected = 0;
        try (BlockGZIPInputFile inStream = new BlockGZIPInputFile(file)) {
            int actual;
            while ((actual = inStream.read()) != -1) {
                assertThat((byte) actual).isEqualTo(expected);
                expected++;
            }
        }
        assertThat(expected).withFailMessage("Expected to load records").isEqualTo((byte) fileSize);
    }

    /**
     * As {@link #testWriteAndRead} but wraps both streams in buffered streams
     * with the given buffer sizes.
     */
    private void testWriteAndReadBuffered(final int blockSize, final int fileSize, final int inBuff, final int outBuf)
            throws IOException {
        final Path file = Files.createTempFile(tempDir, "test", ".bgz");
        FileUtil.deleteFile(file);
        // Stupid Block Size For Testing
        try (OutputStream outStream = new BufferedOutputStream(new BlockGZIPOutputFile(file, blockSize), outBuf)) {
            for (int i = 0; i < fileSize; i++) {
                outStream.write((byte) i);
            }
        }
        byte expected = 0;
        try (InputStream inStream = new BufferedInputStream(new BlockGZIPInputFile(file, inBuff), inBuff)) {
            int actual;
            while ((actual = inStream.read()) != -1) {
                assertThat((byte) actual).isEqualTo(expected);
                expected++;
            }
        }
        assertThat(expected).withFailMessage("Expected to load records").isEqualTo((byte) fileSize);
    }

    @Test
    void testSeeking() throws IOException {
        final Path file = tempDir.resolve("test.bgz");
        FileUtil.deleteFile(file);
        // Stupid Block Size For Testing
        try (BlockGZIPOutputFile outStream = new BlockGZIPOutputFile(file, 10)) {
            for (byte i = 0; i < 105; i++) {
                outStream.write(i);
            }
        }
        try (BlockGZIPInputFile inStream = new BlockGZIPInputFile(file, 10)) {
            inStream.mark(0);
            assertThat(inStream.skip(9)).isEqualTo(9);
            assertThat(inStream.read()).isEqualTo(9);
            inStream.reset();
            assertThat(inStream.skip(50)).isEqualTo(50);
            assertThat(inStream.read()).isEqualTo(50);
            inStream.reset();
            assertThat(inStream.skip(100)).isEqualTo(100);
            assertThat(inStream.read()).isEqualTo(100);
            inStream.reset();
            assertThat(inStream.skip(104)).isEqualTo(104);
            assertThat(inStream.read()).isEqualTo(104);
            inStream.reset();
            assertThat(inStream.skip(50)).isEqualTo(50);
            inStream.mark(-1);
            assertThat(inStream.read()).isEqualTo(50);
            inStream.reset();
            assertThat(inStream.read()).isEqualTo(50);
        }
        try (BlockGZIPInputFile inStream = new BlockGZIPInputFile(file, 10)) {
            final byte[] testRead = new byte[50];
            assertThat(inStream.getPosition()).isEqualTo(0);
            StreamUtil.fillBuffer(inStream, testRead);
            assertThat(inStream.getPosition()).isEqualTo(50);
            // Seek back and re-read
            inStream.seek(0);
            StreamUtil.fillBuffer(inStream, testRead);
            assertThat(inStream.getPosition()).isEqualTo(50);
            // Go back
            for (byte i = 94; i >= 0; i--) {
                inStream.seek(i);
                assertThat(inStream.read()).isEqualTo(i);
                assertThat(inStream.skip(9)).isEqualTo(9);
                assertThat(inStream.read()).isEqualTo(i + 10);
            }
            // Go forward
            for (byte i = 3; i < 100; i += 8) {
                inStream.seek(i);
                assertThat(inStream.read()).isEqualTo(i);
            }
        }
    }
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.migrationhubrefactorspaces.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/migration-hub-refactor-spaces-2021-10-26/GetApplication"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetApplicationResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
// NOTE(review): generated AWS SDK model (see @Generated on the class). Kept
// byte-identical apart from this note — change the code generator, not this file.
/**
 * <p>
 * The endpoint URL of the API Gateway proxy.
 * </p>
 */
private ApiGatewayProxyConfig apiGatewayProxy;
/**
 * <p>
 * The unique identifier of the application.
 * </p>
 */
private String applicationId;
/**
 * <p>
 * The Amazon Resource Name (ARN) of the application.
 * </p>
 */
private String arn;
/**
 * <p>
 * The Amazon Web Services account ID of the application creator.
 * </p>
 */
private String createdByAccountId;
/**
 * <p>
 * A timestamp that indicates when the application is created.
 * </p>
 */
private java.util.Date createdTime;
/**
 * <p>
 * The unique identifier of the environment.
 * </p>
 */
private String environmentId;
/**
 * <p>
 * Any error associated with the application resource.
 * </p>
 */
private ErrorResponse error;
/**
 * <p>
 * A timestamp that indicates when the application was last updated.
 * </p>
 */
private java.util.Date lastUpdatedTime;
/**
 * <p>
 * The name of the application.
 * </p>
 */
private String name;
/**
 * <p>
 * The Amazon Web Services account ID of the application owner (which is always the same as the environment owner
 * account ID).
 * </p>
 */
private String ownerAccountId;
/**
 * <p>
 * The proxy type of the proxy created within the application.
 * </p>
 */
private String proxyType;
/**
 * <p>
 * The current state of the application.
 * </p>
 */
private String state;
/**
 * <p>
 * The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services resource. Each
 * tag consists of a key-value pair.
 * </p>
 */
private java.util.Map<String, String> tags;
/**
 * <p>
 * The ID of the virtual private cloud (VPC).
 * </p>
 */
private String vpcId;
// NOTE(review): generated bean accessors (set/get/with fluent pattern) from the
// AWS SDK code generator. Kept byte-identical apart from this note.
/**
 * <p>
 * The endpoint URL of the API Gateway proxy.
 * </p>
 *
 * @param apiGatewayProxy
 *        The endpoint URL of the API Gateway proxy.
 */
public void setApiGatewayProxy(ApiGatewayProxyConfig apiGatewayProxy) {
this.apiGatewayProxy = apiGatewayProxy;
}
/**
 * <p>
 * The endpoint URL of the API Gateway proxy.
 * </p>
 *
 * @return The endpoint URL of the API Gateway proxy.
 */
public ApiGatewayProxyConfig getApiGatewayProxy() {
return this.apiGatewayProxy;
}
/**
 * <p>
 * The endpoint URL of the API Gateway proxy.
 * </p>
 *
 * @param apiGatewayProxy
 *        The endpoint URL of the API Gateway proxy.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetApplicationResult withApiGatewayProxy(ApiGatewayProxyConfig apiGatewayProxy) {
setApiGatewayProxy(apiGatewayProxy);
return this;
}
/**
 * <p>
 * The unique identifier of the application.
 * </p>
 *
 * @param applicationId
 *        The unique identifier of the application.
 */
public void setApplicationId(String applicationId) {
this.applicationId = applicationId;
}
/**
 * <p>
 * The unique identifier of the application.
 * </p>
 *
 * @return The unique identifier of the application.
 */
public String getApplicationId() {
return this.applicationId;
}
/**
 * <p>
 * The unique identifier of the application.
 * </p>
 *
 * @param applicationId
 *        The unique identifier of the application.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetApplicationResult withApplicationId(String applicationId) {
setApplicationId(applicationId);
return this;
}
/**
 * <p>
 * The Amazon Resource Name (ARN) of the application.
 * </p>
 *
 * @param arn
 *        The Amazon Resource Name (ARN) of the application.
 */
public void setArn(String arn) {
this.arn = arn;
}
/**
 * <p>
 * The Amazon Resource Name (ARN) of the application.
 * </p>
 *
 * @return The Amazon Resource Name (ARN) of the application.
 */
public String getArn() {
return this.arn;
}
/**
 * <p>
 * The Amazon Resource Name (ARN) of the application.
 * </p>
 *
 * @param arn
 *        The Amazon Resource Name (ARN) of the application.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public GetApplicationResult withArn(String arn) {
setArn(arn);
return this;
}
/**
 * <p>
 * The Amazon Web Services account ID of the application creator.
 * </p>
 *
 * @param createdByAccountId
 *        The Amazon Web Services account ID of the application creator.
 */
public void setCreatedByAccountId(String createdByAccountId) {
this.createdByAccountId = createdByAccountId;
}
/**
* <p>
* The Amazon Web Services account ID of the application creator.
* </p>
*
* @return The Amazon Web Services account ID of the application creator.
*/
public String getCreatedByAccountId() {
return this.createdByAccountId;
}
/**
* <p>
* The Amazon Web Services account ID of the application creator.
* </p>
*
* @param createdByAccountId
* The Amazon Web Services account ID of the application creator.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withCreatedByAccountId(String createdByAccountId) {
setCreatedByAccountId(createdByAccountId);
return this;
}
/**
* <p>
* A timestamp that indicates when the application is created.
* </p>
*
* @param createdTime
* A timestamp that indicates when the application is created.
*/
public void setCreatedTime(java.util.Date createdTime) {
this.createdTime = createdTime;
}
/**
* <p>
* A timestamp that indicates when the application is created.
* </p>
*
* @return A timestamp that indicates when the application is created.
*/
public java.util.Date getCreatedTime() {
return this.createdTime;
}
/**
* <p>
* A timestamp that indicates when the application is created.
* </p>
*
* @param createdTime
* A timestamp that indicates when the application is created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withCreatedTime(java.util.Date createdTime) {
setCreatedTime(createdTime);
return this;
}
/**
* <p>
* The unique identifier of the environment.
* </p>
*
* @param environmentId
* The unique identifier of the environment.
*/
public void setEnvironmentId(String environmentId) {
this.environmentId = environmentId;
}
/**
* <p>
* The unique identifier of the environment.
* </p>
*
* @return The unique identifier of the environment.
*/
public String getEnvironmentId() {
return this.environmentId;
}
/**
* <p>
* The unique identifier of the environment.
* </p>
*
* @param environmentId
* The unique identifier of the environment.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withEnvironmentId(String environmentId) {
setEnvironmentId(environmentId);
return this;
}
/**
* <p>
* Any error associated with the application resource.
* </p>
*
* @param error
* Any error associated with the application resource.
*/
public void setError(ErrorResponse error) {
this.error = error;
}
/**
* <p>
* Any error associated with the application resource.
* </p>
*
* @return Any error associated with the application resource.
*/
public ErrorResponse getError() {
return this.error;
}
/**
* <p>
* Any error associated with the application resource.
* </p>
*
* @param error
* Any error associated with the application resource.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withError(ErrorResponse error) {
setError(error);
return this;
}
/**
* <p>
* A timestamp that indicates when the application was last updated.
* </p>
*
* @param lastUpdatedTime
* A timestamp that indicates when the application was last updated.
*/
public void setLastUpdatedTime(java.util.Date lastUpdatedTime) {
this.lastUpdatedTime = lastUpdatedTime;
}
/**
* <p>
* A timestamp that indicates when the application was last updated.
* </p>
*
* @return A timestamp that indicates when the application was last updated.
*/
public java.util.Date getLastUpdatedTime() {
return this.lastUpdatedTime;
}
/**
* <p>
* A timestamp that indicates when the application was last updated.
* </p>
*
* @param lastUpdatedTime
* A timestamp that indicates when the application was last updated.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withLastUpdatedTime(java.util.Date lastUpdatedTime) {
setLastUpdatedTime(lastUpdatedTime);
return this;
}
/**
* <p>
* The name of the application.
* </p>
*
* @param name
* The name of the application.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* The name of the application.
* </p>
*
* @return The name of the application.
*/
public String getName() {
return this.name;
}
/**
* <p>
* The name of the application.
* </p>
*
* @param name
* The name of the application.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withName(String name) {
setName(name);
return this;
}
/**
* <p>
* The Amazon Web Services account ID of the application owner (which is always the same as the environment owner
* account ID).
* </p>
*
* @param ownerAccountId
* The Amazon Web Services account ID of the application owner (which is always the same as the environment
* owner account ID).
*/
public void setOwnerAccountId(String ownerAccountId) {
this.ownerAccountId = ownerAccountId;
}
/**
* <p>
* The Amazon Web Services account ID of the application owner (which is always the same as the environment owner
* account ID).
* </p>
*
* @return The Amazon Web Services account ID of the application owner (which is always the same as the environment
* owner account ID).
*/
public String getOwnerAccountId() {
return this.ownerAccountId;
}
/**
* <p>
* The Amazon Web Services account ID of the application owner (which is always the same as the environment owner
* account ID).
* </p>
*
* @param ownerAccountId
* The Amazon Web Services account ID of the application owner (which is always the same as the environment
* owner account ID).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withOwnerAccountId(String ownerAccountId) {
setOwnerAccountId(ownerAccountId);
return this;
}
/**
* <p>
* The proxy type of the proxy created within the application.
* </p>
*
* @param proxyType
* The proxy type of the proxy created within the application.
* @see ProxyType
*/
public void setProxyType(String proxyType) {
this.proxyType = proxyType;
}
/**
* <p>
* The proxy type of the proxy created within the application.
* </p>
*
* @return The proxy type of the proxy created within the application.
* @see ProxyType
*/
public String getProxyType() {
return this.proxyType;
}
/**
* <p>
* The proxy type of the proxy created within the application.
* </p>
*
* @param proxyType
* The proxy type of the proxy created within the application.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ProxyType
*/
public GetApplicationResult withProxyType(String proxyType) {
setProxyType(proxyType);
return this;
}
/**
* <p>
* The proxy type of the proxy created within the application.
* </p>
*
* @param proxyType
* The proxy type of the proxy created within the application.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ProxyType
*/
public GetApplicationResult withProxyType(ProxyType proxyType) {
this.proxyType = proxyType.toString();
return this;
}
/**
* <p>
* The current state of the application.
* </p>
*
* @param state
* The current state of the application.
* @see ApplicationState
*/
public void setState(String state) {
this.state = state;
}
/**
* <p>
* The current state of the application.
* </p>
*
* @return The current state of the application.
* @see ApplicationState
*/
public String getState() {
return this.state;
}
/**
* <p>
* The current state of the application.
* </p>
*
* @param state
* The current state of the application.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ApplicationState
*/
public GetApplicationResult withState(String state) {
setState(state);
return this;
}
/**
* <p>
* The current state of the application.
* </p>
*
* @param state
* The current state of the application.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ApplicationState
*/
public GetApplicationResult withState(ApplicationState state) {
this.state = state.toString();
return this;
}
/**
* <p>
* The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services resource. Each
* tag consists of a key-value pair.
* </p>
*
* @return The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services
* resource. Each tag consists of a key-value pair.
*/
public java.util.Map<String, String> getTags() {
return tags;
}
/**
* <p>
* The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services resource. Each
* tag consists of a key-value pair.
* </p>
*
* @param tags
* The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services resource.
* Each tag consists of a key-value pair.
*/
public void setTags(java.util.Map<String, String> tags) {
this.tags = tags;
}
/**
* <p>
* The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services resource. Each
* tag consists of a key-value pair.
* </p>
*
* @param tags
* The tags assigned to the application. A tag is a label that you assign to an Amazon Web Services resource.
* Each tag consists of a key-value pair.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withTags(java.util.Map<String, String> tags) {
setTags(tags);
return this;
}
/**
* Add a single Tags entry
*
* @see GetApplicationResult#withTags
* @returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult addTagsEntry(String key, String value) {
if (null == this.tags) {
this.tags = new java.util.HashMap<String, String>();
}
if (this.tags.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.tags.put(key, value);
return this;
}
    /**
     * Removes all the entries added into Tags.
     *
     * <p>
     * After this call {@code getTags()} returns {@code null}, not an empty map.
     * </p>
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetApplicationResult clearTagsEntries() {
        // Drop the whole map reference; addTagsEntry lazily re-creates it.
        this.tags = null;
        return this;
    }
/**
* <p>
* The ID of the virtual private cloud (VPC).
* </p>
*
* @param vpcId
* The ID of the virtual private cloud (VPC).
*/
public void setVpcId(String vpcId) {
this.vpcId = vpcId;
}
/**
* <p>
* The ID of the virtual private cloud (VPC).
* </p>
*
* @return The ID of the virtual private cloud (VPC).
*/
public String getVpcId() {
return this.vpcId;
}
/**
* <p>
* The ID of the virtual private cloud (VPC).
* </p>
*
* @param vpcId
* The ID of the virtual private cloud (VPC).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetApplicationResult withVpcId(String vpcId) {
setVpcId(vpcId);
return this;
}
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * <p>
     * Only non-null members are included; the Tags map is always redacted.
     * </p>
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getApiGatewayProxy() != null)
            sb.append("ApiGatewayProxy: ").append(getApiGatewayProxy()).append(",");
        if (getApplicationId() != null)
            sb.append("ApplicationId: ").append(getApplicationId()).append(",");
        if (getArn() != null)
            sb.append("Arn: ").append(getArn()).append(",");
        if (getCreatedByAccountId() != null)
            sb.append("CreatedByAccountId: ").append(getCreatedByAccountId()).append(",");
        if (getCreatedTime() != null)
            sb.append("CreatedTime: ").append(getCreatedTime()).append(",");
        if (getEnvironmentId() != null)
            sb.append("EnvironmentId: ").append(getEnvironmentId()).append(",");
        if (getError() != null)
            sb.append("Error: ").append(getError()).append(",");
        if (getLastUpdatedTime() != null)
            sb.append("LastUpdatedTime: ").append(getLastUpdatedTime()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getOwnerAccountId() != null)
            sb.append("OwnerAccountId: ").append(getOwnerAccountId()).append(",");
        if (getProxyType() != null)
            sb.append("ProxyType: ").append(getProxyType()).append(",");
        if (getState() != null)
            sb.append("State: ").append(getState()).append(",");
        if (getTags() != null)
            // Tag values may carry user data, so the whole map is redacted.
            sb.append("Tags: ").append("***Sensitive Data Redacted***").append(",");
        if (getVpcId() != null)
            sb.append("VpcId: ").append(getVpcId());
        sb.append("}");
        return sb.toString();
    }
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetApplicationResult == false)
return false;
GetApplicationResult other = (GetApplicationResult) obj;
if (other.getApiGatewayProxy() == null ^ this.getApiGatewayProxy() == null)
return false;
if (other.getApiGatewayProxy() != null && other.getApiGatewayProxy().equals(this.getApiGatewayProxy()) == false)
return false;
if (other.getApplicationId() == null ^ this.getApplicationId() == null)
return false;
if (other.getApplicationId() != null && other.getApplicationId().equals(this.getApplicationId()) == false)
return false;
if (other.getArn() == null ^ this.getArn() == null)
return false;
if (other.getArn() != null && other.getArn().equals(this.getArn()) == false)
return false;
if (other.getCreatedByAccountId() == null ^ this.getCreatedByAccountId() == null)
return false;
if (other.getCreatedByAccountId() != null && other.getCreatedByAccountId().equals(this.getCreatedByAccountId()) == false)
return false;
if (other.getCreatedTime() == null ^ this.getCreatedTime() == null)
return false;
if (other.getCreatedTime() != null && other.getCreatedTime().equals(this.getCreatedTime()) == false)
return false;
if (other.getEnvironmentId() == null ^ this.getEnvironmentId() == null)
return false;
if (other.getEnvironmentId() != null && other.getEnvironmentId().equals(this.getEnvironmentId()) == false)
return false;
if (other.getError() == null ^ this.getError() == null)
return false;
if (other.getError() != null && other.getError().equals(this.getError()) == false)
return false;
if (other.getLastUpdatedTime() == null ^ this.getLastUpdatedTime() == null)
return false;
if (other.getLastUpdatedTime() != null && other.getLastUpdatedTime().equals(this.getLastUpdatedTime()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getOwnerAccountId() == null ^ this.getOwnerAccountId() == null)
return false;
if (other.getOwnerAccountId() != null && other.getOwnerAccountId().equals(this.getOwnerAccountId()) == false)
return false;
if (other.getProxyType() == null ^ this.getProxyType() == null)
return false;
if (other.getProxyType() != null && other.getProxyType().equals(this.getProxyType()) == false)
return false;
if (other.getState() == null ^ this.getState() == null)
return false;
if (other.getState() != null && other.getState().equals(this.getState()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
if (other.getVpcId() == null ^ this.getVpcId() == null)
return false;
if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getApiGatewayProxy() == null) ? 0 : getApiGatewayProxy().hashCode());
hashCode = prime * hashCode + ((getApplicationId() == null) ? 0 : getApplicationId().hashCode());
hashCode = prime * hashCode + ((getArn() == null) ? 0 : getArn().hashCode());
hashCode = prime * hashCode + ((getCreatedByAccountId() == null) ? 0 : getCreatedByAccountId().hashCode());
hashCode = prime * hashCode + ((getCreatedTime() == null) ? 0 : getCreatedTime().hashCode());
hashCode = prime * hashCode + ((getEnvironmentId() == null) ? 0 : getEnvironmentId().hashCode());
hashCode = prime * hashCode + ((getError() == null) ? 0 : getError().hashCode());
hashCode = prime * hashCode + ((getLastUpdatedTime() == null) ? 0 : getLastUpdatedTime().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getOwnerAccountId() == null) ? 0 : getOwnerAccountId().hashCode());
hashCode = prime * hashCode + ((getProxyType() == null) ? 0 : getProxyType().hashCode());
hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode());
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
hashCode = prime * hashCode + ((getVpcId() == null) ? 0 : getVpcId().hashCode());
return hashCode;
}
    @Override
    public GetApplicationResult clone() {
        try {
            // Shallow copy via Object.clone(); mutable members (Date fields, tags map)
            // are shared between the original and the clone.
            return (GetApplicationResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
|
|
package Armadillo.Core.Text;
import java.util.ArrayList;
import java.util.List;
import Armadillo.Core.KeyValuePair;
import Armadillo.Core.Logger;
import Armadillo.Core.ParserHelper;
import Armadillo.Core.Text.StringHelper;
import Armadillo.Core.Text.TokeniserHelper;
/**
 * Splits free text into lower-case word and number tokens.
 *
 * <p>Public entry points keep their original (C#-style) names and signatures.
 * All failures are logged via {@link Logger} and mapped to empty results,
 * matching the surrounding codebase's best-effort error-handling style.
 */
public class Tokeniser {

    /**
     * Tokenises {@code str} into words and canonicalised numbers.
     *
     * @param str             raw input; null/empty yields an empty array
     * @param blnIgnoreDigits if true, digit-only sub-tokens are dropped by the
     *                        final character-level pass
     * @return the tokens, or an empty array on null input or internal error
     */
    public static String[] Tokenise(
        String str,
        boolean blnIgnoreDigits)
    {
        try
        {
            if (StringHelper.IsNullOrEmpty(str))
            {
                return new String[0];
            }
            // First pass: split on whitespace.
            String[] substrings = TokeniserHelper.tokenise(str, "\t\b\n ");
            List<String> substringsList = new ArrayList<String>(substrings.length);
            // Out-parameter buffer reused by ParserHelper.IsNumeric.
            double[] dblParsedNumber = new double[1];
            for (String strCurrStr : substrings)
            {
                if (StringHelper.IsNullOrEmpty(strCurrStr))
                {
                    continue;
                }
                strCurrStr = StringHelper.RemoveCommonSymbols(strCurrStr);
                if (StringHelper.IsNullOrEmpty(strCurrStr))
                {
                    continue;
                }
                if (Character.isDigit(strCurrStr.charAt(0)) &&
                    ParserHelper.IsNumeric(strCurrStr, dblParsedNumber))
                {
                    // Purely numeric token: keep its canonical numeric form.
                    substringsList.add(dblParsedNumber[0] + "");
                    continue;
                }
                // Mixed token: split into alternating digit/non-digit chunks.
                List<KeyValuePair<String, Boolean>> digitLeters = SplitDigitLetters(strCurrStr);
                if (digitLeters == null ||
                    digitLeters.isEmpty())
                {
                    continue;
                }
                for (KeyValuePair<String, Boolean> kvp : digitLeters)
                {
                    // Value == true marks a digit chunk; parse it if possible,
                    // otherwise fall back to character-level tokenisation.
                    if (kvp.getValue() &&
                        ParserHelper.IsNumeric(kvp.getKey(), dblParsedNumber))
                    {
                        substringsList.add(dblParsedNumber[0] + "");
                    }
                    else
                    {
                        TokeniseAndAdd(kvp.getKey(), substringsList, blnIgnoreDigits);
                    }
                }
            }
            return substringsList.toArray(new String[0]);
        }
        catch (Exception ex)
        {
            Logger.log(ex);
        }
        return new String[0];
    }

    /**
     * Tokenises {@code str} with {@link #Tokenise0} and appends every resulting
     * token to {@code substringsList}, unless the result is the sentinel
     * single-empty-string set.
     */
    private static void TokeniseAndAdd(
        String str,
        List<String> substringsList,
        boolean blnIgnoreDigits)
    {
        String[] tokens = Tokenise0(str, blnIgnoreDigits);
        if (tokens == null || tokens.length == 0)
        {
            return;
        }
        if (tokens.length == 1 &&
            StringHelper.IsNullOrEmpty(tokens[0]))
        {
            // Tokenise0 signals "no tokens" with a single empty string.
            return;
        }
        for (String strCurrTok : tokens)
        {
            substringsList.add(strCurrTok);
        }
    }

    /**
     * Returns the sentinel "empty" token set: a single empty string.
     */
    private static String[] GetEmptyTokenSet()
    {
        // Array creation cannot fail; no defensive try/catch needed.
        return new String[] { "" };
    }

    /**
     * Tokenises {@code strInput}, removes any stop words, and wraps each
     * surviving token in a {@link TokenWrapper}.
     *
     * @param strStopWordsArr stop words to drop; null means "no filtering"
     */
    public static TokenWrapper[] TokeniseAndWrap(
        String strInput,
        String[] strStopWordsArr)
    {
        try
        {
            if (strStopWordsArr == null)
            {
                return TokeniseAndWrap(strInput);
            }
            String[] tokens = Tokenise(strInput,
                                       strStopWordsArr,
                                       false);
            return WrapTokens(tokens);
        }
        catch (Exception ex)
        {
            Logger.log(ex);
        }
        return new TokenWrapper[0];
    }

    /**
     * Convenience overload of {@link #Tokenise(String, String[], boolean)}
     * that keeps numeric tokens.
     */
    public static String[] Tokenise(
        String strInput,
        String[] strStopWordsArr)
    {
        return Tokenise(
            strInput,
            strStopWordsArr,
            false);
    }

    /**
     * Tokenises {@code strInput} and removes any token that exactly matches a
     * non-empty stop word.
     *
     * @param strStopWordsArr stop words to drop; null means "no filtering"
     * @param blnIgnoreNumbers if true, digit-only tokens are dropped
     */
    public static String[] Tokenise(
        String strInput,
        String[] strStopWordsArr,
        boolean blnIgnoreNumbers)
    {
        try
        {
            if (strStopWordsArr == null)
            {
                return Tokenise(strInput,
                                blnIgnoreNumbers);
            }
            String[] tokenArr = Tokenise(
                strInput,
                blnIgnoreNumbers);
            ArrayList<String> tokenList =
                new ArrayList<String>(tokenArr.length);
            for (String strToken : tokenArr)
            {
                boolean blnAddToken = true;
                for (String strStopWord : strStopWordsArr)
                {
                    // Empty stop-words are ignored rather than matched.
                    if (!strStopWord.equals("") &&
                        strStopWord.equals(strToken))
                    {
                        blnAddToken = false;
                        break; // no need to scan the remaining stop words
                    }
                }
                if (blnAddToken)
                {
                    tokenList.add(strToken);
                }
            }
            return tokenList.toArray(new String[0]);
        }
        catch (Exception ex)
        {
            Logger.log(ex);
        }
        return new String[0];
    }

    /**
     * Tokenises {@code strInput} (keeping numbers) and wraps each token.
     */
    public static TokenWrapper[] TokeniseAndWrap(String strInput)
    {
        String[] tokens = Tokenise(strInput, false);
        return WrapTokens(tokens);
    }

    /**
     * Wraps each token string in a {@link TokenWrapper}, preserving order.
     */
    public static TokenWrapper[] WrapTokens(String[] tokens)
    {
        try
        {
            TokenWrapper[] tokenWraps = new TokenWrapper[tokens.length];
            for (int i = 0; i < tokens.length; i++)
            {
                tokenWraps[i] = new TokenWrapper(tokens[i]);
            }
            return tokenWraps;
        }
        catch (Exception ex)
        {
            Logger.log(ex);
        }
        return new TokenWrapper[0];
    }

    /**
     * Convenience overload of {@link #Tokenise(String, boolean)} that keeps
     * numeric tokens.
     */
    public static String[] Tokenise(
        String strInput)
    {
        return Tokenise(strInput, false);
    }

    /**
     * Splits a lower-cased string into alternating chunks of digit and
     * non-digit characters.
     *
     * <p>Each pair's value flag is {@code true} for a digit-led chunk and
     * {@code false} otherwise. Note that non-digit chunks may contain symbols
     * and embedded spaces, mirroring the original scanning rules.
     *
     * @return the chunks, or null on empty input, empty result, or error
     */
    public static List<KeyValuePair<String, Boolean>> SplitDigitLetters(
        String strInput)
    {
        try
        {
            if (StringHelper.IsNullOrEmpty(strInput))
            {
                return null;
            }
            strInput = strInput.toLowerCase();
            List<KeyValuePair<String, Boolean>> tokens = new ArrayList<KeyValuePair<String, Boolean>>();
            int cursor = 0;
            int length = strInput.length();
            while (cursor < length)
            {
                char ch = strInput.charAt(cursor);
                if (ch == ' ')
                {
                    cursor++;
                }
                else if (!Character.isDigit(ch))
                {
                    // Run of non-digits (letters, symbols, embedded spaces).
                    StringBuilder word = new StringBuilder();
                    while (cursor < length &&
                           !Character.isDigit(strInput.charAt(cursor)))
                    {
                        word.append(strInput.charAt(cursor));
                        cursor++;
                    }
                    tokens.add(new KeyValuePair<String, Boolean>(word.toString(), false));
                }
                else if (!Character.isLetter(ch))
                {
                    // Run of non-letters starting with a digit.
                    StringBuilder word = new StringBuilder();
                    while (cursor < length &&
                           !Character.isLetter(strInput.charAt(cursor)))
                    {
                        word.append(strInput.charAt(cursor));
                        cursor++;
                    }
                    tokens.add(new KeyValuePair<String, Boolean>(word.toString(), true));
                }
                else
                {
                    // Unreachable for chars that are both digit and letter;
                    // kept for safety, matching the original.
                    cursor++;
                }
            }
            if (tokens.isEmpty())
            {
                return null;
            }
            return tokens;
        }
        catch (Exception ex)
        {
            Logger.log(ex);
        }
        return null;
    }

    /**
     * Character-level tokeniser: lower-cases the input and extracts maximal
     * runs of letters and maximal runs of digits; everything else separates.
     *
     * @param blnIgnoreNumbers if true, digit runs are filtered out of the result
     * @return the tokens; the sentinel single-empty-string set when there are
     *         none; an empty array on internal error
     */
    private static String[] Tokenise0(
        String strInput,
        boolean blnIgnoreNumbers)
    {
        try
        {
            if (StringHelper.IsNullOrEmpty(strInput))
            {
                return GetEmptyTokenSet();
            }
            strInput = strInput.toLowerCase();
            List<String> tokens = new ArrayList<String>();
            int cursor = 0;
            int length = strInput.length();
            while (cursor < length)
            {
                char ch = strInput.charAt(cursor);
                if (Character.isLetter(ch))
                {
                    StringBuilder word = new StringBuilder();
                    while (cursor < length &&
                           Character.isLetter(strInput.charAt(cursor)))
                    {
                        word.append(strInput.charAt(cursor));
                        cursor++;
                    }
                    tokens.add(word.toString());
                }
                else if (Character.isDigit(ch))
                {
                    StringBuilder word = new StringBuilder();
                    while (cursor < length &&
                           Character.isDigit(strInput.charAt(cursor)))
                    {
                        word.append(strInput.charAt(cursor));
                        cursor++;
                    }
                    tokens.add(word.toString());
                }
                else
                {
                    // Spaces and symbols act as separators.
                    cursor++;
                }
            }
            if (tokens.isEmpty())
            {
                return GetEmptyTokenSet();
            }
            List<String> outTokens = new ArrayList<String>(tokens.size());
            for (String strToken : tokens)
            {
                if (!StringHelper.IsNullOrEmpty(strToken) &&
                    (!blnIgnoreNumbers ||
                     StringHelper.AllLetters(strToken)))
                {
                    outTokens.add(strToken);
                }
            }
            return outTokens.toArray(new String[0]);
        }
        catch (Exception ex)
        {
            Logger.log(ex);
        }
        return new String[0];
    }
}
|
|
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.k2crypto.storage;
import com.google.k2crypto.K2Context;
import com.google.k2crypto.Key;
import com.google.k2crypto.storage.driver.Driver;
import com.google.k2crypto.storage.driver.ReadableDriver;
import com.google.k2crypto.storage.driver.WrappingDriver;
import com.google.k2crypto.storage.driver.WritableDriver;
import java.net.URI;
/**
* The interface to access a {@link Key} storage location.
*
* <p>This class is conditionally thread-safe; {@link #wrapWith(Key)} and
* {@link #noWrap()} should not be called concurrently to avoid
* non-deterministic {@link #save(Key)} and {@link #load()} behavior.
*
* @author [email protected] (Daryl Seah)
*/
public class Store {
  // Context for the current K2 session
  private final K2Context context;
  // Driver installation backing the store
  private final InstalledDriver installedDriver;
  // Driver instance being wrapped
  private final Driver driver;
  // Storage address that the store points to (non-final: open() may replace it
  // with a driver-normalized form)
  private URI address;
  // Synchronization lock guarding `state` and all driver invocations
  private final Object lock = new int[0];
  // Initial state is always the initial state
  private State state = State.INITIAL;
  // Possible states of the store object; lifecycle: INITIAL -> OPEN -> CLOSED
  private enum State {
    INITIAL, OPEN, CLOSED
  }
/**
* Constructs a Store that is backed by the given driver.
*
* @param installedDriver Driver installed for the store.
* @param address Address to open the store with.
*/
Store(InstalledDriver installedDriver, URI address) {
if (installedDriver == null) {
throw new NullPointerException("installedDriver");
} else if (address == null) {
throw new NullPointerException("address");
}
this.installedDriver = installedDriver;
this.context = installedDriver.getContext();
this.driver = installedDriver.instantiate();
// The address could also be passed in through open(), but the constructor
// seems safer because hashCode(), equals() and toString() depend on address
// not being null. We do not want the object to be in a completely broken
// state after construction.
this.address = address;
}
  /**
   * Returns the context associated with the Store.
   */
  public K2Context getContext() {
    return context;
  }
  /**
   * Returns the address of the storage location that keys will be read from
   * or written to.
   *
   * <p>May differ from the address the Store was constructed with once
   * {@link #open()} has run, since the driver can normalize it.
   */
  public URI getAddress() {
    // NOTE(review): this read is unsynchronized while open() rewrites
    // address under `lock` — confirm the visibility trade-off is intended.
    return address;
  }
  /**
   * Returns information about the driver installation backing the store.
   */
  public InstalledDriver getInstalledDriver() {
    return installedDriver;
  }
  /**
   * Provides access to the driver instance (for testing).
   */
  Driver getDriver() {
    return driver;
  }
  /**
   * Opens the store for loading/saving keys.
   *
   * @return the opened store.
   *
   * @throws IllegalAddressException if the address is not recognized.
   * @throws StoreStateException if the store is already opened (or closed).
   * @throws StoreException if there is a driver-specific issue.
   */
  Store open() throws IllegalAddressException, StoreException {
    // This method is package-restricted because K2Storage automatically
    // opens the Store; there is no need for external code to see open().
    try {
      synchronized (lock) {
        switch (state) {
          default: // Closed
            throw new StoreStateException(StoreStateException.Reason.ALREADY_CLOSED);
          case OPEN:
            throw new StoreStateException(StoreStateException.Reason.ALREADY_OPEN);
          case INITIAL:
            // The driver may hurl on open(), so we defer changing state till
            // after it is done. Depending on the driver, it may not be safe
            // to invoke the read/write methods if open() fails.
            URI driverAddress = driver.open(address);
            if (driverAddress != null) {
              // Driver may provide a transformed address on open()
              address = driverAddress;
            }
            state = State.OPEN;
        }
      }
    } catch (StoreException ex) {
      // Tag the exception with this store so callers can tell which one failed.
      ex.setStore(this);
      throw ex;
    }
    return this;
  }
  /**
   * Closes the store and frees any allocated resources. Reopening the store is
   * not permitted.
   *
   * <p>Idempotent: only the first call on an open store reaches the driver;
   * later calls are no-ops.
   */
  public void close() {
    synchronized (lock) {
      try {
        if (state == State.OPEN) {
          driver.close();
        }
      } finally {
        // No matter what happens, we want the state to be
        // closed when this is done.
        state = State.CLOSED;
      }
    }
  }
  /**
   * Returns {@code true} if, and only if, the store is open.
   */
  public boolean isOpen() {
    synchronized (lock) {
      return state == State.OPEN;
    }
  }
  /**
   * Utility method to check if the store is open for business.
   *
   * <p>Must be called while holding {@code lock}, as all current callers do.
   *
   * @throws StoreStateException if the store is not open.
   */
  private void checkOpen() throws StoreStateException {
    synchronized (lock) {
      switch (state) {
        default: // Closed
          throw new StoreStateException(StoreStateException.Reason.ALREADY_CLOSED);
        case INITIAL:
          throw new StoreStateException(StoreStateException.Reason.NOT_OPEN);
        case OPEN:
          // OPEN is the only state that returns normally.
      }
    }
  }
  /**
   * Indicates that subsequent saves/loads on this store should be
   * wrapped/unwrapped with the provided key.
   *
   * @param key Key protecting the actual stored key.
   *
   * @return this Store, for method chaining.
   *
   * @throws StoreStateException if the store is not open.
   * @throws UnsupportedByStoreException if wrapping is not supported.
   * @throws StoreException if there is a driver-specific issue with the key.
   */
  public Store wrapWith(Key key) throws StoreException {
    // NOTE: the key might be unsuitable for wrapping because of purpose
    // restrictions, which means we will need to add a PurposeException later.
    if (key == null) {
      throw new NullPointerException("key");
    }
    try {
      synchronized (lock) {
        checkOpen();
        if (driver instanceof WrappingDriver) {
          ((WrappingDriver) driver).wrapWith(key);
        } else {
          // Unlike noWrap(), a driver that cannot wrap is a hard error here:
          // the caller explicitly asked for protection we cannot provide.
          throw new UnsupportedByStoreException(UnsupportedByStoreException.Reason.NO_WRAP);
        }
      }
    } catch (StoreException ex) {
      ex.setStore(this);
      throw ex;
    }
    return this;
  }
  /**
   * Indicates that subsequent saves/loads on this store will not be wrapped.
   *
   * <p>Silently succeeds on drivers without wrapping support, since "no
   * wrapping" is already their only mode.
   *
   * @return this Store, for method chaining.
   *
   * @throws StoreStateException if the store is not open.
   * @throws StoreException if there is a driver-specific issue with disabling
   *                        wrapping.
   */
  public Store noWrap() throws StoreException {
    try {
      synchronized (lock) {
        checkOpen();
        // We are basically expanding wrapWith implemented at the driver
        // so that it will be clearer to the user of the store
        if (driver instanceof WrappingDriver) {
          ((WrappingDriver) driver).wrapWith(null);
        }
      }
    } catch (StoreException ex) {
      ex.setStore(this);
      throw ex;
    }
    return this;
  }
  /**
   * Returns {@code true} if a wrapping key is currently set (with
   * {@link #wrapWith(Key)}), {@code false} otherwise.
   *
   * <p>Always {@code false} for drivers without wrapping support.
   *
   * @throws StoreStateException if the store is not open.
   * @throws StoreException if there is a driver-specific issue.
   */
  public boolean isWrapping() throws StoreException {
    try {
      synchronized (lock) {
        checkOpen();
        if (driver instanceof WrappingDriver) {
          return ((WrappingDriver) driver).isWrapping();
        }
      }
    } catch (StoreException ex) {
      ex.setStore(this);
      throw ex;
    }
    return false;
  }
/**
* Returns {@code true} if there is no key stored at this location,
* {@code false} if one might be present.
*
* <p>Note that if this method returns false, there is no a guarantee that
* the key will actually be readable. The data might be encrypted, corrupted
* or be in an invalid format. An attempt must be made to {@link #load()} to
* know for sure if it is readable.
*
* @throws StoreStateException if the store is not open.
* @throws StoreIOException if there is an I/O issue with checking emptiness.
* @throws UnsupportedByStoreException if the store is write-only.
* @throws StoreException if there is a driver-specific issue.
*/
public boolean isEmpty() throws StoreException {
try {
synchronized (lock) {
checkOpen();
if (driver instanceof ReadableDriver) {
return ((ReadableDriver) driver).isEmpty();
} else {
// Non-readable implies the driver must be writable
throw new UnsupportedByStoreException(UnsupportedByStoreException.Reason.WRITE_ONLY);
}
}
} catch (StoreException ex) {
ex.setStore(this);
throw ex;
}
}
/**
* Saves the given key to the store. Any existing key will be silently
* replaced, regardless of whether it is wrapped.
*
* @param key Key to save.
*
* @throws StoreStateException if the store is not open.
* @throws StoreIOException if there is an I/O issue with saving the key.
* @throws UnsupportedByStoreException if the store is read-only.
* @throws StoreException if there is a driver-specific issue with saving.
*/
public void save(Key key) throws StoreException {
if (key == null) {
throw new NullPointerException("key");
}
try {
synchronized (lock) {
checkOpen();
if (driver instanceof WritableDriver) {
((WritableDriver) driver).save(key);
} else {
// Non-writable implies the driver must be readable
throw new UnsupportedByStoreException(UnsupportedByStoreException.Reason.READ_ONLY);
}
}
} catch (StoreException ex) {
ex.setStore(this);
throw ex;
}
}
/**
* Loads the key stored at this location.
*
* @return the stored key or null if the location is empty.
*
* @throws StoreStateException if the store is not open.
* @throws StoreIOException if there is an I/O issue with loading the key.
* @throws UnsupportedByStoreException if the store is write-only.
* @throws StoreException if there is a driver-specific issue with loading.
*/
public Key load() throws StoreException {
try {
synchronized (lock) {
checkOpen();
if (driver instanceof ReadableDriver) {
return ((ReadableDriver) driver).load();
} else {
// Non-readable implies the driver must be writable
throw new UnsupportedByStoreException(UnsupportedByStoreException.Reason.WRITE_ONLY);
}
}
} catch (StoreException ex) {
ex.setStore(this);
throw ex;
}
}
/**
* Erases any stored key, regardless of whether it is wrapped.
*
* @return {@code true} if, and only if, there was data present and it has
* been erased.
*
* @throws StoreStateException if the store is not open.
* @throws StoreIOException if there is an I/O issue with erasing the key.
* @throws UnsupportedByStoreException if the store is read-only.
* @throws StoreException if there is a driver-specific issue with erasing.
*/
public boolean erase() throws StoreException {
try {
synchronized (lock) {
checkOpen();
if (driver instanceof WritableDriver) {
return ((WritableDriver) driver).erase();
} else {
// Non-writable implies the driver must be readable
throw new UnsupportedByStoreException(UnsupportedByStoreException.Reason.READ_ONLY);
}
}
} catch (StoreException ex) {
ex.setStore(this);
throw ex;
}
}
/**
* Returns the hash-code for the store, which is the hash of the URI address.
*/
    @Override
    public int hashCode() {
        // Derived solely from the URI address. Consistent with equals():
        // equal stores have equal addresses and therefore equal hashes.
        return address.hashCode();
    }
/**
* Tests the store for equality with an object.
*
* @param obj Object to compare to.
*
* @return {@code true} if, and only if, the object is also a Store and it
* has the same address and driver as this one.
*/
@Override
public boolean equals(Object obj) {
if (obj instanceof Store) {
Store other = (Store) obj;
return other.address.equals(address) && other.installedDriver.equals(installedDriver);
}
return false;
}
/**
* @see Object#toString()
*/
@Override
public String toString() {
return address + "(" + state + ")";
}
}
|
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.app.catalog.resources;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.registry.core.app.catalog.model.ComputeResource;
import org.apache.airavata.registry.core.app.catalog.model.ComputeResourceFileSystem;
import org.apache.airavata.registry.core.app.catalog.model.ComputeResourceFileSystem_PK;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogJPAUtils;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogQueryGenerator;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogResourceType;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ComputeResourceFileSystemResource extends AppCatAbstractResource {
private final static Logger logger = LoggerFactory.getLogger(ComputeResourceFileSystemResource.class);
private String computeResourceId;
private ComputeResourceResource computeHostResource;
private String path;
private String fileSystem;
@Override
public void remove(Object identifier) throws AppCatalogException {
HashMap<String, String> ids;
if (identifier instanceof Map) {
ids = (HashMap<String, String>) identifier;
} else {
logger.error("Identifier should be a map with the field name and it's value");
throw new AppCatalogException("Identifier should be a map with the field name and it's value");
}
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_FILE_SYSTEM);
generator.setParameter(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID, ids.get(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID));
generator.setParameter(ComputeResourceFileSystemConstants.FILE_SYSTEM, ids.get(ComputeResourceFileSystemConstants.FILE_SYSTEM));
Query q = generator.deleteQuery(em);
q.executeUpdate();
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
@Override
public AppCatalogResource get(Object identifier) throws AppCatalogException {
HashMap<String, String> ids;
if (identifier instanceof Map) {
ids = (HashMap<String, String>) identifier;
} else {
logger.error("Identifier should be a map with the field name and it's value");
throw new AppCatalogException("Identifier should be a map with the field name and it's value");
}
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_FILE_SYSTEM);
generator.setParameter(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID, ids.get(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID));
generator.setParameter(ComputeResourceFileSystemConstants.FILE_SYSTEM, ids.get(ComputeResourceFileSystemConstants.FILE_SYSTEM));
Query q = generator.selectQuery(em);
ComputeResourceFileSystem computeResourceFileSystem = (ComputeResourceFileSystem) q.getSingleResult();
ComputeResourceFileSystemResource computeResourceFileSystemResource = (ComputeResourceFileSystemResource) AppCatalogJPAUtils.getResource(AppCatalogResourceType.COMPUTE_RESOURCE_FILE_SYSTEM, computeResourceFileSystem);
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
return computeResourceFileSystemResource;
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
@Override
public List<AppCatalogResource> get(String fieldName, Object value) throws AppCatalogException {
List<AppCatalogResource> computeResourceFileSystemResources = new ArrayList<AppCatalogResource>();
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_FILE_SYSTEM);
Query q;
if ((fieldName.equals(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID)) || (fieldName.equals(ComputeResourceFileSystemConstants.PATH)) || (fieldName.equals(ComputeResourceFileSystemConstants.FILE_SYSTEM))) {
generator.setParameter(fieldName, value);
q = generator.selectQuery(em);
List<?> results = q.getResultList();
for (Object result : results) {
ComputeResourceFileSystem computeResourceFileSystem = (ComputeResourceFileSystem) result;
ComputeResourceFileSystemResource computeResourceFileSystemResource = (ComputeResourceFileSystemResource) AppCatalogJPAUtils.getResource(AppCatalogResourceType.COMPUTE_RESOURCE_FILE_SYSTEM, computeResourceFileSystem);
computeResourceFileSystemResources.add(computeResourceFileSystemResource);
}
} else {
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
logger.error("Unsupported field name for Compute Resource File System Resource.", new IllegalArgumentException());
throw new IllegalArgumentException("Unsupported field name for Compute Resource File System Resource.");
}
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
return computeResourceFileSystemResources;
}
@Override
public List<AppCatalogResource> getAll() throws AppCatalogException {
return null;
}
@Override
public List<String> getAllIds() throws AppCatalogException {
return null;
}
@Override
public List<String> getIds(String fieldName, Object value) throws AppCatalogException {
List<String> computeResourceFileSystemResourceIDs = new ArrayList<String>();
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(COMPUTE_RESOURCE_FILE_SYSTEM);
Query q;
if ((fieldName.equals(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID)) || (fieldName.equals(ComputeResourceFileSystemConstants.PATH)) || (fieldName.equals(ComputeResourceFileSystemConstants.FILE_SYSTEM))) {
generator.setParameter(fieldName, value);
q = generator.selectQuery(em);
List<?> results = q.getResultList();
for (Object result : results) {
ComputeResourceFileSystem computeResourceFileSystem = (ComputeResourceFileSystem) result;
ComputeResourceFileSystemResource computeResourceFileSystemResource = (ComputeResourceFileSystemResource) AppCatalogJPAUtils.getResource(AppCatalogResourceType.COMPUTE_RESOURCE_FILE_SYSTEM, computeResourceFileSystem);
computeResourceFileSystemResourceIDs.add(computeResourceFileSystemResource.getComputeResourceId());
}
} else {
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
logger.error("Unsupported field name for Compute Resource File System Resource.", new IllegalArgumentException());
throw new IllegalArgumentException("Unsupported field name for Compute Resource File System Resource.");
}
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
return computeResourceFileSystemResourceIDs;
}
@Override
public void save() throws AppCatalogException {
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
ComputeResourceFileSystem existingComputeResourceFileSystem = em.find(ComputeResourceFileSystem.class, new ComputeResourceFileSystem_PK(computeResourceId, fileSystem));
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
ComputeResourceFileSystem computeResourceFileSystem;
em = AppCatalogJPAUtils.getEntityManager();
em.getTransaction().begin();
if (existingComputeResourceFileSystem == null) {
computeResourceFileSystem = new ComputeResourceFileSystem();
} else {
computeResourceFileSystem = existingComputeResourceFileSystem;
}
computeResourceFileSystem.setComputeResourceId(getComputeResourceId());
ComputeResource computeResource = em.find(ComputeResource.class, getComputeResourceId());
computeResourceFileSystem.setComputeResource(computeResource);
computeResourceFileSystem.setPath(getPath());
computeResourceFileSystem.setFileSystem(getFileSystem());
if (existingComputeResourceFileSystem == null) {
em.persist(computeResourceFileSystem);
} else {
em.merge(computeResourceFileSystem);
}
em.getTransaction().commit();
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
@Override
public boolean isExists(Object identifier) throws AppCatalogException {
HashMap<String, String> ids;
if (identifier instanceof Map) {
ids = (HashMap<String, String>) identifier;
} else {
logger.error("Identifier should be a map with the field name and it's value");
throw new AppCatalogException("Identifier should be a map with the field name and it's value");
}
EntityManager em = null;
try {
em = AppCatalogJPAUtils.getEntityManager();
ComputeResourceFileSystem computeResourceFileSystem = em.find(ComputeResourceFileSystem.class, new ComputeResourceFileSystem_PK(ids.get(ComputeResourceFileSystemConstants.COMPUTE_RESOURCE_ID), ids.get(ComputeResourceFileSystemConstants.FILE_SYSTEM)));
if (em.isOpen()) {
if (em.getTransaction().isActive()){
em.getTransaction().rollback();
}
em.close();
}
return computeResourceFileSystem != null;
} catch (ApplicationSettingsException e) {
logger.error(e.getMessage(), e);
throw new AppCatalogException(e);
} finally {
if (em != null && em.isOpen()) {
if (em.getTransaction().isActive()) {
em.getTransaction().rollback();
}
em.close();
}
}
}
public String getComputeResourceId() {
return computeResourceId;
}
public ComputeResourceResource getComputeHostResource() {
return computeHostResource;
}
public String getPath() {
return path;
}
public String getFileSystem() {
return fileSystem;
}
public void setComputeResourceId(String computeResourceId) {
this.computeResourceId=computeResourceId;
}
public void setComputeHostResource(ComputeResourceResource computeHostResource) {
this.computeHostResource=computeHostResource;
}
public void setPath(String path) {
this.path=path;
}
public void setFileSystem(String fileSystem) {
this.fileSystem=fileSystem;
}
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.messaging.servicebus;
import com.azure.core.amqp.exception.AmqpResponseCode;
import com.azure.messaging.servicebus.implementation.ServiceBusDescribedType;
import org.apache.qpid.proton.Proton;
import org.apache.qpid.proton.amqp.Symbol;
import org.apache.qpid.proton.amqp.messaging.AmqpValue;
import org.apache.qpid.proton.amqp.messaging.ApplicationProperties;
import org.apache.qpid.proton.amqp.messaging.DeliveryAnnotations;
import org.apache.qpid.proton.amqp.messaging.Footer;
import org.apache.qpid.proton.amqp.messaging.Header;
import org.apache.qpid.proton.message.Message;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.net.URI;
import java.time.Duration;
import java.time.OffsetDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import static com.azure.messaging.servicebus.TestUtils.APPLICATION_PROPERTIES;
import static com.azure.messaging.servicebus.TestUtils.SEQUENCE_NUMBER;
import static com.azure.messaging.servicebus.TestUtils.getMessage;
import static com.azure.messaging.servicebus.TestUtils.getServiceBusMessage;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Unit tests for {@link ServiceBusMessageSerializer}: null-argument handling,
 * type restrictions, AMQP section round-tripping, and decoding of described
 * types (URI, TimeSpan, DateTimeOffset) produced by the .NET SDK.
 */
class ServiceBusMessageSerializerTest {
    private final ServiceBusMessageSerializer serializer = new ServiceBusMessageSerializer();

    @Test
    void deserializeMessageNotNull() {
        assertThrows(NullPointerException.class, () -> serializer.deserialize(null, ServiceBusMessage.class));
    }

    @Test
    void deserializeClassNotNull() {
        assertThrows(NullPointerException.class, () -> serializer.deserialize(Proton.message(), null));
    }

    @Test
    void serializeObjectNotNull() {
        assertThrows(NullPointerException.class, () -> serializer.serialize(null));
    }

    /**
     * Verify that we cannot serialize something that is not of type Message.
     */
    @Test
    void cannotSerializeObject() {
        String something = "oops";
        assertThrows(IllegalArgumentException.class, () -> serializer.serialize(something));
    }

    /**
     * Verify we can only deserialize supported classes.
     */
    @Test
    void cannotDeserializeObject() {
        final org.apache.qpid.proton.message.Message message = getMessage("hello-world".getBytes(UTF_8));
        assertThrows(IllegalArgumentException.class, () -> serializer.deserialize(message, ServiceBusReceiverAsyncClient.class));
        assertThrows(IllegalArgumentException.class, () -> serializer.deserializeList(message, ServiceBusReceiverAsyncClient.class));
    }

    /**
     * Verify that we can deserialize a proton-j message with all the correct contents to {@link ServiceBusMessage}.
     */
    @Test
    void deserializeMessage() {
        // Arrange
        final String payload = "hello-world";
        final byte[] payloadBytes = payload.getBytes(UTF_8);
        final org.apache.qpid.proton.message.Message message = getMessage(payloadBytes);
        message.setAddress("a-to-address");
        message.setContentType("some-content-type");
        message.setCorrelationId("correlation-id-test");
        message.setDeliveryCount(10);
        message.setTtl(1045);
        message.setMessageId("a-test-message-id");
        message.setSubject("this is a label");
        message.getProperties().setTo("this is a to property");
        message.setReplyTo("reply-to-property");
        message.setReplyToGroupId("reply-to-session-id-property");
        message.setGroupId("session-id-as-a-group-id");

        // Message Annotations
        Map<Symbol, Object> expectedMessageAnnotations = message.getMessageAnnotations().getValue();
        expectedMessageAnnotations.put(Symbol.valueOf("A"), "A value");

        // Delivery Annotations
        Map<Symbol, Object> expectedDeliveryAnnotations = new HashMap<>();
        expectedDeliveryAnnotations.put(Symbol.valueOf("D"), "D value");
        message.setDeliveryAnnotations(new DeliveryAnnotations(expectedDeliveryAnnotations));

        // Footer
        Map<Symbol, Object> expectedFooterValues = new HashMap<>();
        expectedFooterValues.put(Symbol.valueOf("footer1"), "footer value");
        message.setFooter(new Footer(expectedFooterValues));

        // Act
        final ServiceBusReceivedMessage actualMessage = serializer.deserialize(message, ServiceBusReceivedMessage.class);

        // Assert
        // Verifying all our system properties were properly deserialized.
        assertNotNull(actualMessage.getEnqueuedTime());
        assertEquals(SEQUENCE_NUMBER, actualMessage.getSequenceNumber());

        // Verifying that all our properties are set.
        assertEquals(message.getTtl(), actualMessage.getTimeToLive().toMillis());
        assertEquals(message.getSubject(), actualMessage.getSubject());
        assertEquals(message.getReplyTo(), actualMessage.getReplyTo());
        assertEquals(message.getDeliveryCount(), actualMessage.getDeliveryCount());
        assertEquals(message.getProperties().getTo(), actualMessage.getTo());
        assertEquals(message.getReplyToGroupId(), actualMessage.getReplyToSessionId());
        assertEquals(message.getGroupId(), actualMessage.getSessionId());
        assertEquals(message.getContentType(), actualMessage.getContentType());
        assertEquals(message.getCorrelationId(), actualMessage.getCorrelationId());

        assertValues(expectedMessageAnnotations, actualMessage.getRawAmqpMessage().getMessageAnnotations());
        assertValues(expectedDeliveryAnnotations, actualMessage.getRawAmqpMessage().getDeliveryAnnotations());
        assertValues(expectedFooterValues, actualMessage.getRawAmqpMessage().getFooter());

        // Verifying our application properties are the same.
        assertEquals(APPLICATION_PROPERTIES.size(), actualMessage.getApplicationProperties().size());
        APPLICATION_PROPERTIES.forEach((key, value) -> {
            Assertions.assertTrue(actualMessage.getApplicationProperties().containsKey(key));
            assertEquals(value, actualMessage.getApplicationProperties().get(key));
        });

        // Verifying the contents of our message is the same.
        assertEquals(payload, actualMessage.getBody().toString());
    }

    /**
     * Verifies that we can serialize OffsetDateTime, Duration and URI in application properties.
     */
    @Test
    void serializeMessageWithSpecificApplicationProperties() {
        String contents = "some contents";
        String messageId = "messageId";
        final ServiceBusMessage message = getServiceBusMessage(contents, messageId);
        HashMap<String, Object> specificMap = new HashMap<>();
        specificMap.put("uri", URI.create("https://www.github.com/"));
        specificMap.put("duration", Duration.ZERO);
        specificMap.put("offsetDateTime", OffsetDateTime.now());
        message.getApplicationProperties().putAll(specificMap);

        Message amqpMessage = serializer.serialize(message);

        assertEquals(specificMap.size(), amqpMessage.getApplicationProperties().getValue().size());
        AtomicInteger convertCount = new AtomicInteger();
        specificMap.forEach((key, value) -> {
            Assertions.assertTrue(amqpMessage.getApplicationProperties().getValue().containsKey(key));
            if (value instanceof URI) {
                assertEquals(((URI) value).toString(), ((ServiceBusDescribedType) amqpMessage.getApplicationProperties().getValue().get(key)).getDescribed());
                convertCount.getAndIncrement();
            } else if (value instanceof Duration) {
                // For align with .net SDK ticks, convert will lose 2 digit.
                convertCount.getAndIncrement();
            } else if (value instanceof OffsetDateTime) {
                // For align with .net SDK ticks, convert will lose 2 digit.
                convertCount.getAndIncrement();
            }
        });
        assertEquals(specificMap.size(), convertCount.get());
    }

    /**
     * Message with specific type send from .net SDK.
     *
     * ServiceBusMessage message = new ServiceBusMessage("Hello world!");
     * DateTime utcTime1 = DateTime.Parse("2022-02-24T08:23:23.443127200Z");
     * utcTime1 = DateTime.SpecifyKind(utcTime1, DateTimeKind.Utc);
     * message.ApplicationProperties.Add("time", utcTime1);
     * message.ApplicationProperties.Add("span", TimeSpan.FromSeconds(10));
     * message.ApplicationProperties.Add("uri", new Uri("https://www.github.com/"));
     */
    @Test
    void deserializeRealMessageFromByte() {
        byte[] data = new byte[] {
            0, 83, 112, -64, 10, 5, 64, 64, 112, 72, 25, 8, 0, 64, 67, 0, 83, 113, -63, 36, 2, -93, 16, 120, 45, 111,
            112, 116, 45, 108, 111, 99, 107, 45, 116, 111, 107, 101, 110, -104, -99, -119, 88, -41, -124, -37, 69, 10,
            -98, -95, -99, 119, -64, -61, 36, 90, 0, 83, 114, -63, 85, 6, -93, 19, 120, 45, 111, 112, 116, 45, 101, 110,
            113, 117, 101, 117, 101, 100, 45, 116, 105, 109, 101, -125, 0, 0, 1, 127, 42, -30, 45, 43, -93, 21, 120, 45,
            111, 112, 116, 45, 115, 101, 113, 117, 101, 110, 99, 101, 45, 110, 117, 109, 98, 101, 114, 85, 78, -93, 18,
            120, 45, 111, 112, 116, 45, 108, 111, 99, 107, 101, 100, 45, 117, 110, 116, 105, 108, -125, 0, 0, 1, 127,
            42, -30, -94, 106, 0, 83, 115, -64, 63, 13, -95, 32, 53, 98, 100, 50, 56, 100, 98, 97, 48, 56, 54, 99, 52,
            98, 57, 99, 98, 55, 55, 49, 99, 100, 97, 97, 101, 102, 52, 51, 102, 102, 49, 98, 64, 64, 64, 64, 64, 64, 64,
            -125, 0, 0, 1, 127, 114, -5, 53, 43, -125, 0, 0, 1, 127, 42, -30, 45, 43, 64, 64, 64, 0, 83, 116, -63, -118,
            6, -95, 4, 116, 105, 109, 101, 0, -93, 29, 99, 111, 109, 46, 109, 105, 99, 114, 111, 115, 111, 102, 116, 58,
            100, 97, 116, 101, 116, 105, 109, 101, 45, 111, 102, 102, 115, 101, 116, -127, 8, -39, -9, -79, -6, -116,
            -83, 40, -95, 4, 115, 112, 97, 110, 0, -93, 22, 99, 111, 109, 46, 109, 105, 99, 114, 111, 115, 111, 102,
            116, 58, 116, 105, 109, 101, 115, 112, 97, 110, -127, 0, 0, 0, 0, 5, -11, -31, 0, -95, 3, 117, 114, 105, 0,
            -93, 17, 99, 111, 109, 46, 109, 105, 99, 114, 111, 115, 111, 102, 116, 58, 117, 114, 105, -95, 23, 104, 116,
            116, 112, 115, 58, 47, 47, 119, 119, 119, 46, 103, 105, 116, 104, 117, 98, 46, 99, 111, 109, 47, 0, 83, 117,
            -96, 12, 72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33
        };
        final Message amqpMessage = Proton.message();
        amqpMessage.decode(data, 0, data.length);
        amqpMessage.setHeader(new Header());

        final ServiceBusReceivedMessage actualMessage = serializer.deserialize(amqpMessage, ServiceBusReceivedMessage.class);

        AtomicInteger convertCount = new AtomicInteger();
        HashMap<String, Object> specificMap = new HashMap<>();
        specificMap.put("uri", URI.create("https://www.github.com/"));
        // TimeSpan.FromSeconds(10) == 100,000,000 ticks of 100 ns (0x05F5E100,
        // the payload value of the com.microsoft:timespan described type).
        specificMap.put("span", Duration.ofSeconds(10));
        specificMap.put("time", OffsetDateTime.parse("2022-02-24T08:23:23.443127200Z"));
        assertEquals(specificMap.size(), actualMessage.getApplicationProperties().size());
        // Compare each expected value against the DESERIALIZED property. The
        // previous version compared the expected map against itself for the
        // Duration and OffsetDateTime entries, which asserted nothing.
        specificMap.forEach((key, value) -> {
            Assertions.assertTrue(actualMessage.getApplicationProperties().containsKey(key));
            Object actualValue = actualMessage.getApplicationProperties().get(key);
            if (value instanceof URI) {
                assertEquals(value, actualValue);
                convertCount.getAndIncrement();
            } else if (value instanceof Duration) {
                assertEquals(value, actualValue);
                convertCount.getAndIncrement();
            } else if (value instanceof OffsetDateTime) {
                assertEquals(value, actualValue);
                convertCount.getAndIncrement();
            }
        });
        assertEquals(specificMap.size(), convertCount.get());
    }

    /**
     * Verifies that an empty collection is returned if the status code was not {@link AmqpResponseCode#ACCEPTED}.
     */
    @Test
    void deserializeListMessagesNotOK() {
        // Arrange
        final Map<String, Object> properties = new HashMap<>();
        properties.put("status-code", AmqpResponseCode.FORBIDDEN.getValue());
        final Message message = Proton.message();
        message.setBody(new AmqpValue("test"));
        message.setApplicationProperties(new ApplicationProperties(properties));

        // Act
        final List<ServiceBusReceivedMessage> actual = serializer.deserializeList(message, ServiceBusReceivedMessage.class);

        // Assert
        Assertions.assertNotNull(actual);
        Assertions.assertTrue(actual.isEmpty());
    }

    /** Asserts both maps hold the same entries, keyed by the symbol's string form. */
    private void assertValues(Map<Symbol, Object> expected, Map<String, Object> actual) {
        assertEquals(expected.size(), actual.size());
        for (Map.Entry<Symbol, Object> expectedEntry : expected.entrySet()) {
            assertEquals(expectedEntry.getValue(), actual.get(expectedEntry.getKey().toString()));
        }
    }
}
|
|
package com.krishagni.catissueplus.core.biospecimen.domain.factory.impl;
import static com.krishagni.catissueplus.core.common.PvAttributes.ETHNICITY;
import static com.krishagni.catissueplus.core.common.PvAttributes.GENDER;
import static com.krishagni.catissueplus.core.common.PvAttributes.GENOTYPE;
import static com.krishagni.catissueplus.core.common.PvAttributes.RACE;
import static com.krishagni.catissueplus.core.common.PvAttributes.VITAL_STATUS;
import static com.krishagni.catissueplus.core.common.service.PvValidator.areValid;
import static com.krishagni.catissueplus.core.common.service.PvValidator.isValid;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.BeanUtils;
import com.krishagni.catissueplus.core.administrative.domain.Site;
import com.krishagni.catissueplus.core.administrative.domain.factory.SiteErrorCode;
import com.krishagni.catissueplus.core.biospecimen.domain.Participant;
import com.krishagni.catissueplus.core.biospecimen.domain.ParticipantMedicalIdentifier;
import com.krishagni.catissueplus.core.biospecimen.domain.factory.ParticipantErrorCode;
import com.krishagni.catissueplus.core.biospecimen.domain.factory.ParticipantFactory;
import com.krishagni.catissueplus.core.biospecimen.domain.factory.ParticipantUtil;
import com.krishagni.catissueplus.core.biospecimen.events.ParticipantDetail;
import com.krishagni.catissueplus.core.biospecimen.events.PmiDetail;
import com.krishagni.catissueplus.core.biospecimen.repository.DaoFactory;
import com.krishagni.catissueplus.core.common.errors.ActivityStatusErrorCode;
import com.krishagni.catissueplus.core.common.errors.ErrorType;
import com.krishagni.catissueplus.core.common.errors.OpenSpecimenException;
import com.krishagni.catissueplus.core.common.util.Status;
public class ParticipantFactoryImpl implements ParticipantFactory {
private DaoFactory daoFactory;
	// Injection point for the DAO factory (wired by the Spring context);
	// used by the SSN-uniqueness check during attribute validation.
	public void setDaoFactory(DaoFactory daoFactory) {
		this.daoFactory = daoFactory;
	}
@Override
public Participant createParticipant(ParticipantDetail detail) {
Participant participant = new Participant();
OpenSpecimenException ose = new OpenSpecimenException(ErrorType.USER_ERROR);
setParticipantAttrs(detail, participant, false, ose);
ose.checkAndThrow();
return participant;
}
@Override
public Participant createParticipant(Participant existing, ParticipantDetail detail) {
Participant participant = new Participant();
BeanUtils.copyProperties(existing, participant, new String[] {"cprs"});
OpenSpecimenException ose = new OpenSpecimenException(ErrorType.USER_ERROR);
setParticipantAttrs(detail, participant, true, ose);
ose.checkAndThrow();
return participant;
}
	// Copies every supported attribute from the DTO onto the participant,
	// collecting validation failures into ose. When partial is true, only
	// attributes flagged modified on the DTO are applied.
	private void setParticipantAttrs(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException ose) {
		// Adopt a caller-supplied id only when the participant has none yet.
		if (participant.getId() == null && detail.getId() != null) {
			participant.setId(detail.getId());
		}
		setSsn(detail, participant, partial, ose);
		setName(detail, participant, partial, ose);
		setVitalStatus(detail, participant, partial, ose);
		setBirthDate(detail, participant, partial, ose);
		setDeathDate(detail, participant, partial, ose);
		setActivityStatus(detail, participant, partial, ose);
		setSexGenotype(detail, participant, partial, ose);
		setGender(detail, participant, partial, ose);
		setRace(detail, participant, partial, ose);
		setEthnicity(detail, participant, partial, ose);
		setPmi(detail, participant, partial, ose);
		// empi has no dedicated setter helper; applied inline.
		if (!partial || detail.isAttrModified("empi")) {
			participant.setEmpi(detail.getEmpi());
		}
	}
private void setSsn(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("ssn")) {
return;
}
String ssn = detail.getSsn();
if (StringUtils.isBlank(ssn)) {
participant.setSocialSecurityNumber(null);
return;
}
if (isValidSsn(ssn)) {
if (partial && !ssn.equals(participant.getSocialSecurityNumber())) {
ParticipantUtil.ensureUniqueSsn(daoFactory, ssn, oce);
}
participant.setSocialSecurityNumber(ssn);
} else {
oce.addError(ParticipantErrorCode.INVALID_SSN);
}
}
private void setName(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException ose) {
if (!partial || detail.isAttrModified("firstName")) {
participant.setFirstName(detail.getFirstName());
}
if (!partial || detail.isAttrModified("middleName")) {
participant.setMiddleName(detail.getMiddleName());
}
if (!partial || detail.isAttrModified("lastName")) {
participant.setLastName(detail.getLastName());
}
}
private void setVitalStatus(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("vitalStatus")) {
return;
}
String vitalStatus = detail.getVitalStatus();
if (!isValid(VITAL_STATUS, vitalStatus)) {
oce.addError(ParticipantErrorCode.INVALID_VITAL_STATUS);
return;
}
participant.setVitalStatus(vitalStatus);
}
private void setBirthDate(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("birthDate")) {
return;
}
Date birthDate = detail.getBirthDate();
if (birthDate == null) {
return;
}
if (birthDate.after(Calendar.getInstance().getTime())) {
oce.addError(ParticipantErrorCode.INVALID_BIRTH_DATE);
return;
}
participant.setBirthDate(birthDate);
}
private void setDeathDate(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("deathDate")) {
return;
}
Date deathDate = detail.getDeathDate();
if (deathDate == null) {
return;
}
if (participant.getBirthDate() != null && deathDate.before(participant.getBirthDate())) {
oce.addError(ParticipantErrorCode.INVALID_DEATH_DATE);
}
// TODO: how do we set vital status to dead now?
participant.setDeathDate(deathDate);
}
private void setActivityStatus(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("activityStatus")) {
return;
}
String status = detail.getActivityStatus();
if (StringUtils.isBlank(status)) {
participant.setActivityStatus(Status.ACTIVITY_STATUS_ACTIVE.getStatus());
return;
}
if (!Status.isValidActivityStatus(status)) {
oce.addError(ActivityStatusErrorCode.INVALID);
return;
}
participant.setActivityStatus(status);
}
private void setSexGenotype(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("sexGenotype")) {
return;
}
String genotype = detail.getSexGenotype();
if (!isValid(GENOTYPE, genotype)) {
oce.addError(ParticipantErrorCode.INVALID_GENOTYPE);
return;
}
participant.setSexGenotype(genotype);
}
private void setGender(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("gender")) {
return;
}
String gender = detail.getGender();
if (!isValid(GENDER, gender)) {
oce.addError(ParticipantErrorCode.INVALID_GENDER);
return;
}
participant.setGender(gender);
}
private void setRace(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("races")) {
return;
}
Set<String> races = detail.getRaces();
if (CollectionUtils.isEmpty(races)) {
return;
}
if (!areValid(RACE, races)) {
oce.addError(ParticipantErrorCode.INVALID_RACE);
return;
}
participant.setRaces(races);
}
private void setEthnicity(ParticipantDetail detail, Participant participant, boolean partial, OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("ethnicity")) {
return;
}
String ethnicity = detail.getEthnicity();
if (!isValid(ETHNICITY, ethnicity)) {
oce.addError(ParticipantErrorCode.INVALID_ETHNICITY);
return;
}
participant.setEthnicity(ethnicity);
}
private void setPmi(
ParticipantDetail detail,
Participant participant,
boolean partial,
OpenSpecimenException oce) {
if (partial && !detail.isAttrModified("pmis")) {
return;
}
if (partial) {
boolean unique = ParticipantUtil.ensureUniquePmis(
daoFactory,
detail.getPmis(),
participant,
oce);
if (!unique) {
return;
}
}
Set<ParticipantMedicalIdentifier> newPmis = new HashSet<ParticipantMedicalIdentifier>();
if (CollectionUtils.isEmpty(detail.getPmis())) {
participant.setPmis(newPmis);
} else {
Set<String> siteNames = new HashSet<String>();
boolean dupSite = false;
for (PmiDetail pmiDetail : detail.getPmis()) {
ParticipantMedicalIdentifier pmi = getPmi(pmiDetail, oce);
if (pmi == null) {
continue;
}
if (!dupSite && !siteNames.add(pmiDetail.getSiteName())) {
dupSite = true;
oce.addError(ParticipantErrorCode.DUP_MRN_SITE, pmiDetail.getSiteName());
}
pmi.setParticipant(participant);
newPmis.add(pmi);
}
}
participant.setPmis(newPmis);
}
private ParticipantMedicalIdentifier getPmi(PmiDetail pmiDetail, OpenSpecimenException oce) {
Site site = daoFactory.getSiteDao().getSiteByName(pmiDetail.getSiteName());
if (site == null) {
oce.addError(SiteErrorCode.NOT_FOUND);
return null;
}
if (StringUtils.isBlank(pmiDetail.getMrn())) {
oce.addError(ParticipantErrorCode.MRN_REQUIRED);
return null;
}
ParticipantMedicalIdentifier pmi = new ParticipantMedicalIdentifier();
pmi.setSite(site);
pmi.setMedicalRecordNumber(pmiDetail.getMrn());
return pmi;
}
private boolean isValidSsn(String ssn) {
try {
if (StringUtils.isBlank(ssn)) {
return true;
}
return SSN_PATTERN.matcher(ssn).matches();
} catch (Exception exp) {
return false;
}
}
private static final Pattern SSN_PATTERN = Pattern.compile("[0-9]{3}-[0-9]{2}-[0-9]{4}");
}
|
|
/*
* Licensed to the University of California, Berkeley under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package tachyon.master;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import tachyon.Constants;
import tachyon.UnderFileSystem;
import tachyon.client.TachyonFS;
import tachyon.conf.TachyonConf;
import tachyon.thrift.NetAddress;
import tachyon.util.CommonUtils;
import tachyon.util.NetworkUtils;
import tachyon.worker.TachyonWorker;
/**
* Local Tachyon cluster for unit tests.
*/
public final class LocalTachyonCluster {
    // Smoke-test entry point: start and stop the cluster twice to verify
    // repeated lifecycles work from a clean state.
    public static void main(String[] args) throws Exception {
        LocalTachyonCluster cluster = new LocalTachyonCluster(100, 8 * Constants.MB, Constants.GB);
        cluster.start();
        CommonUtils.sleepMs(null, Constants.SECOND_MS);
        cluster.stop();
        CommonUtils.sleepMs(null, Constants.SECOND_MS);
        cluster = new LocalTachyonCluster(100, 8 * Constants.MB, Constants.GB);
        cluster.start();
        CommonUtils.sleepMs(null, Constants.SECOND_MS);
        cluster.stop();
        CommonUtils.sleepMs(null, Constants.SECOND_MS);
    }

    private TachyonWorker mWorker = null;       // the single local worker instance
    private long mWorkerCapacityBytes;          // worker memory capacity (bytes)
    private int mUserBlockSize;                 // default user block size (bytes)
    private int mQuotaUnitBytes;                // user quota unit (bytes)
    private String mTachyonHome;                // temp directory serving as TACHYON_HOME
    private String mWorkerDataFolder;           // worker data folder path (set in start())
    private Thread mWorkerThread = null;        // thread running mWorker.start()
    private String mLocalhostName = null;       // hostname used for master and worker
    private LocalTachyonMaster mMaster;         // in-process master wrapper
    private TachyonConf mMasterConf;            // master configuration (built in start())
    private TachyonConf mWorkerConf;            // worker configuration, derived from master's

    /**
     * Configures cluster sizing only; nothing is started until {@link #start()}.
     *
     * @param workerCapacityBytes worker memory capacity in bytes
     * @param quotaUnitBytes user quota unit in bytes
     * @param userBlockSize default user block size in bytes
     */
    public LocalTachyonCluster(long workerCapacityBytes, int quotaUnitBytes, int userBlockSize) {
        mWorkerCapacityBytes = workerCapacityBytes;
        mQuotaUnitBytes = quotaUnitBytes;
        mUserBlockSize = userBlockSize;
    }

    // Returns a client connected to the local master.
    public TachyonFS getClient() throws IOException {
        return mMaster.getClient();
    }

    public String getEditLogPath() {
        return mMaster.getEditLogPath();
    }

    public String getImagePath() {
        return mMaster.getImagePath();
    }

    public TachyonConf getMasterTachyonConf() {
        return mMasterConf;
    }

    public InetSocketAddress getMasterAddress() {
        return new InetSocketAddress(mLocalhostName, getMasterPort());
    }

    public String getMasterHostname() {
        return mLocalhostName;
    }

    public MasterInfo getMasterInfo() {
        return mMaster.getMasterInfo();
    }

    public String getMasterUri() {
        return mMaster.getUri();
    }

    public int getMasterPort() {
        return mMaster.getMetaPort();
    }

    public String getTachyonHome() {
        return mTachyonHome;
    }

    public String getTempFolderInUnderFs() {
        return mMasterConf.get(Constants.UNDERFS_ADDRESS, "/underfs");
    }

    public TachyonWorker getWorker() {
        return mWorker;
    }

    public TachyonConf getWorkerTachyonConf() {
        return mWorkerConf;
    }

    public NetAddress getWorkerAddress() {
        return new NetAddress(mLocalhostName, getWorkerPort(), getWorkerDataPort());
    }

    public String getWorkerDataFolder() {
        return mWorkerDataFolder;
    }

    public int getWorkerPort() {
        return mWorker.getMetaPort();
    }

    public int getWorkerDataPort() {
        return mWorker.getDataPort();
    }

    // Deletes the path (recursively) via the under-filesystem, failing loudly if
    // it exists but cannot be removed.
    private void deleteDir(String path) throws IOException {
        UnderFileSystem ufs = UnderFileSystem.get(path, getMasterTachyonConf());
        if (ufs.exists(path) && !ufs.delete(path, true)) {
            throw new IOException("Folder " + path + " already exists but can not be deleted.");
        }
    }

    // Recreates the directory from scratch: any existing content is removed first.
    private void mkdir(String path) throws IOException {
        UnderFileSystem ufs = UnderFileSystem.get(path, getMasterTachyonConf());
        if (ufs.exists(path)) {
            ufs.delete(path, true);
        }
        if (!ufs.mkdirs(path, true)) {
            throw new IOException("Failed to make folder: " + path);
        }
    }

    /**
     * Builds master and worker configurations, then starts the master (in
     * process) and the worker (on a background thread).
     *
     * Ordering matters: the worker conf copies the master conf and reads the
     * master's actual port, so the master must be started first.
     */
    public void start() throws IOException {
        // createTempFile yields a unique path; deleteDir/mkdir below replace the
        // temp file with a directory of the same name.
        mTachyonHome =
            File.createTempFile("Tachyon", "U" + System.currentTimeMillis()).getAbsolutePath();
        mWorkerDataFolder = "/datastore";
        mLocalhostName = NetworkUtils.getLocalHostName();
        mMasterConf = new TachyonConf();
        mMasterConf.set(Constants.IN_TEST_MODE, "true");
        mMasterConf.set(Constants.TACHYON_HOME, mTachyonHome);
        mMasterConf.set(Constants.USER_QUOTA_UNIT_BYTES, Integer.toString(mQuotaUnitBytes));
        mMasterConf.set(Constants.USER_DEFAULT_BLOCK_SIZE_BYTE, Integer.toString(mUserBlockSize));
        mMasterConf.set(Constants.USER_REMOTE_READ_BUFFER_SIZE_BYTE, "64");
        // Lower the number of threads that the cluster will spin off.
        // default thread overhead is too much.
        mMasterConf.set(Constants.MASTER_SELECTOR_THREADS, "1");
        mMasterConf.set(Constants.MASTER_SERVER_THREADS, "2");
        mMasterConf.set(Constants.MASTER_WEB_THREAD_COUNT, "1");
        // re-build the dir to set permission to 777
        deleteDir(mTachyonHome);
        mkdir(mTachyonHome);
        mMaster = LocalTachyonMaster.create(mTachyonHome, mMasterConf);
        mMaster.start();
        mkdir(mMasterConf.get(Constants.UNDERFS_DATA_FOLDER, "/tachyon/data"));
        mkdir(mMasterConf.get(Constants.UNDERFS_WORKERS_FOLDER, "/tachyon/workers"));
        // Brief pause to let the master finish coming up before the worker binds.
        CommonUtils.sleepMs(null, 10);
        // Worker conf inherits from master conf; port 0 = pick any free port.
        mWorkerConf = new TachyonConf(mMasterConf);
        mWorkerConf.set(Constants.MASTER_PORT, getMasterPort() + "");
        mWorkerConf.set(Constants.MASTER_WEB_PORT, (getMasterPort() + 1) + "");
        mWorkerConf.set(Constants.WORKER_PORT, "0");
        mWorkerConf.set(Constants.WORKER_DATA_PORT, "0");
        mWorkerConf.set(Constants.WORKER_DATA_FOLDER, mWorkerDataFolder);
        mWorkerConf.set(Constants.WORKER_MEMORY_SIZE, Long.toString(mWorkerCapacityBytes));
        mWorkerConf.set(Constants.WORKER_TO_MASTER_HEARTBEAT_INTERVAL_MS, "15");
        mWorkerConf.set(Constants.WORKER_SELECTOR_THREADS, Integer.toString(1));
        mWorkerConf.set(Constants.WORKER_SERVER_THREADS, Integer.toString(2));
        mWorkerConf.set(Constants.WORKER_NETTY_WORKER_THREADS, Integer.toString(2));
        // Tier 0 lives in the per-test ramdisk under the temp home.
        mWorkerConf.set("tachyon.worker.hierarchystore.level0.alias", "MEM");
        mWorkerConf.set("tachyon.worker.hierarchystore.level0.dirs.path", mTachyonHome + "/ramdisk");
        mWorkerConf.set("tachyon.worker.hierarchystore.level0.dirs.quota", mWorkerCapacityBytes + "");
        // Re-root every configured storage tier (levels 1..maxLevel-1) under the
        // temp home so tests never touch real system paths.
        int maxLevel = mWorkerConf.getInt(Constants.WORKER_MAX_HIERARCHY_STORAGE_LEVEL, 1);
        for (int level = 1; level < maxLevel; level ++) {
            String tierLevelDirPath = "tachyon.worker.hierarchystore.level" + level + ".dirs.path";
            String[] dirPaths = mWorkerConf.get(tierLevelDirPath, "/mnt/ramdisk").split(",");
            String newPath = "";
            for (int i = 0; i < dirPaths.length; i ++) {
                newPath += mTachyonHome + dirPaths[i] + ",";
            }
            // substring drops the trailing comma added by the loop above.
            mWorkerConf.set("tachyon.worker.hierarchystore.level" + level + ".dirs.path",
                newPath.substring(0, newPath.length() - 1));
        }
        mWorker =
            TachyonWorker.createWorker(new InetSocketAddress(mLocalhostName, getMasterPort()),
                new InetSocketAddress(mLocalhostName, 0), 0, 1, 1, 1, mWorkerConf);
        // Worker blocks in start(); run it on its own thread so start() returns.
        Runnable runWorker = new Runnable() {
            @Override
            public void run() {
                try {
                    mWorker.start();
                } catch (Exception e) {
                    throw new RuntimeException(e + " \n Start Worker Error \n" + e.getMessage(), e);
                }
            }
        };
        mWorkerThread = new Thread(runWorker);
        mWorkerThread.start();
    }

    /**
     * Stop both of the tachyon and underfs service threads.
     *
     * @throws Exception
     */
    public void stop() throws Exception {
        stopTFS();
        stopUFS();
    }

    /**
     * Stop the tachyon filesystem's service thread only
     *
     * @throws Exception
     */
    public void stopTFS() throws Exception {
        mMaster.stop();
        mWorker.stop();
        // Clear every system property the cluster may have set so later tests
        // start from a clean slate.
        System.clearProperty("tachyon.home");
        System.clearProperty("tachyon.worker.port");
        System.clearProperty("tachyon.worker.data.port");
        System.clearProperty("tachyon.worker.data.folder");
        System.clearProperty("tachyon.worker.memory.size");
        System.clearProperty("tachyon.user.remote.read.buffer.size.byte");
        System.clearProperty("tachyon.worker.to.master.heartbeat.interval.ms");
        System.clearProperty("tachyon.worker.selector.threads");
        System.clearProperty("tachyon.worker.server.threads");
        System.clearProperty("tachyon.worker.hierarchystore.level.max");
        System.clearProperty("tachyon.worker.network.netty.worker.threads");
    }

    /**
     * Cleanup the underfs cluster test folder only
     *
     * @throws Exception
     */
    public void stopUFS() throws Exception {
        mMaster.cleanupUnderfs();
    }

    // Stops the worker while keeping the master running; also drops the
    // master's client connections.
    public void stopWorker() throws Exception {
        mMaster.clearClients();
        mWorker.stop();
    }
}
|
|
/**
* Copyright (c) 2021 Sam Baskinger
*/
package com.github.basking2.sdsai;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* A k-d tree that operates much like a traditional binary search tree.
*
* As such, it is possible for this K-D tree to build a linked list of nodes, in a particularly worst case of insertions.
*
* It is, however, simple, and does not require knowledge of the dimension range.
*
* @param <K> The key value of each element in the multidimensional key.
* @param <V> The value.
*/
public class KDTree<K extends Comparable<K>, V> {

    /** Root of the tree; null when the tree is empty. */
    private Node head;

    /** Number of key/value pairs currently stored. */
    private int size;

    public KDTree() {
        this.head = null;
        this.size = 0;
    }

    /**
     * Finds the value stored under an exactly matching key.
     *
     * @param key The multidimensional key.
     * @return The stored value, or null when the key is absent.
     */
    public V find(final K[] key) {
        if (head == null) {
            return null;
        }
        final Node n = head.find(key, 0);
        return n == null ? null : n.value;
    }

    /**
     * Finds the value at the key, or the value of the last node visited on the
     * search path when no exact match exists.
     *
     * @param key The multidimensional key.
     * @return The closest value found, or null when the tree is empty.
     */
    public V findClosest(final K[] key) {
        if (head == null) {
            return null;
        }
        return head.findClosest(key, 0).value;
    }

    /**
     * Inserts the value under the key. Duplicate keys are allowed; a duplicate
     * is placed in the left subtree of its equal node.
     */
    public void add(final K[] key, final V value) {
        if (head == null) {
            head = new Node(key, value);
            size = 1;
        }
        else {
            head.add(key, 0, value);
            size++;
        }
    }

    public boolean isEmpty() {
        return head == null;
    }

    public void clear() {
        head = null;
        size = 0;
    }

    public int size() {
        return size;
    }

    /** @return The key on the left-most spine, or null when empty. */
    public K[] minKey() {
        if (head == null) {
            return null;
        }
        return head.min().key;
    }

    public V min() {
        if (head == null) {
            return null;
        }
        return head.min().value;
    }

    /** @return The key on the right-most spine, or null when empty. */
    public K[] maxKey() {
        if (head == null) {
            return null;
        }
        return head.max().key;
    }

    public V max() {
        if (head == null) {
            return null;
        }
        return head.max().value;
    }

    /**
     * Removes the left-most node and returns its value, or null when empty.
     */
    public V removeMin() {
        if (head == null) {
            return null;
        }
        size--;
        if (head.left == null) {
            // Head is the min. Remove it.
            if (head.right == null) {
                // Last node.
                size = 0;
                final Node n = head;
                head = null;
                return n.value;
            }
            else {
                final Node n = head;
                final Node min = n.right.removeMinChild();
                if (min == null) {
                    // Right node is the min.
                    head = n.right;
                }
                else {
                    head = min;
                    min.right = n.right;
                }
                return n.value;
            }
        }
        return head.removeMinChild().value;
    }

    /**
     * Removes the right-most node and returns its value, or null when empty.
     */
    public V removeMax() {
        if (head == null) {
            return null;
        }
        size--;
        if (head.right == null) {
            // Head is the max. Remove it.
            if (head.left == null) {
                // Last node.
                size = 0;
                final Node n = head;
                head = null;
                return n.value;
            }
            else {
                final Node n = head;
                final Node max = n.left.removeMaxChild();
                if (max == null) {
                    // Left node is the max.
                    head = n.left;
                }
                else {
                    head = max;
                    max.left = n.left;
                }
                return n.value;
            }
        }
        return head.removeMaxChild().value;
    }

    /**
     * Removes the node with the given key and returns its value.
     *
     * @param key The multidimensional key.
     * @return The removed value, or null when the key was not found.
     */
    public V remove(final K[] key) {
        if (head == null) {
            return null;
        }
        final Node removed = head.remove(null, key, 0);
        if (removed == null) {
            // FIX: the original decremented size before this check, so a miss
            // corrupted the element count. Nothing was removed; size unchanged.
            return null;
        }
        size--;
        if (removed == head) {
            // Node.remove() never detaches the root itself; promote a
            // replacement from one of the subtrees here.
            if (head.left != null) {
                final Node newRoot = head.left.removeMaxChild();
                if (newRoot != null) {
                    newRoot.left = head.left;
                    newRoot.right = head.right;
                    head = newRoot;
                    return removed.value;
                }
            }
            if (head.right != null) {
                final Node newRoot = head.right.removeMinChild();
                if (newRoot != null) {
                    newRoot.left = head.left;
                    newRoot.right = head.right;
                    head = newRoot;
                    return removed.value;
                }
            }
            if (head.left != null) {
                // Left is our new root.
                head.left.right = head.right;
                head = head.left;
                return removed.value;
            }
            if (head.right != null) {
                // Right is our new root.
                head.right.left = head.left;
                head = head.right;
                return removed.value;
            }
            // OH! Head is the last element! Remove it.
            head = null;
            size = 0;
            return removed.value;
        }
        return removed.value;
    }

    /** @return A breadth-first iterator over all keys; empty when the tree is empty. */
    public Iterator<K[]> breadthFirstKeys() {
        if (head == null) {
            @SuppressWarnings("unchecked")
            final Iterator<K[]> i = (Iterator<K[]>) Collections.EMPTY_LIST.iterator();
            return i;
        }
        final Iterator<Node> itr = head.iterator();
        return new Iterator<K[]>(){
            @Override
            public boolean hasNext() {
                return itr.hasNext();
            }
            @Override
            public K[] next() {
                return itr.next().key;
            }
        };
    }

    /** @return A breadth-first iterator over all values; empty when the tree is empty. */
    public Iterator<V> breadthFirstValues() {
        if (head == null) {
            @SuppressWarnings("unchecked")
            final Iterator<V> i = (Iterator<V>) Collections.EMPTY_LIST.iterator();
            return i;
        }
        final Iterator<Node> itr = head.iterator();
        return new Iterator<V>(){
            @Override
            public boolean hasNext() {
                return itr.hasNext();
            }
            @Override
            public V next() {
                return itr.next().value;
            }
        };
    }

    private class Node implements Iterable<Node> {
        private K[] key;
        private V value;
        private Node left;
        private Node right;

        public Node(final K[] key, final V value) {
            this.key = key;
            this.value = value;
        }

        /**
         * Insert the value at the key along the axis in this node.
         * Keys comparing equal (or less) on the current axis go left.
         *
         * @param key
         * @param axis
         * @param value
         */
        public void add(final K[] key, final int axis, final V value) {
            final int cmp = key[axis].compareTo(this.key[axis]);
            if (cmp <= 0) {
                if (left == null) {
                    left = new Node(key, value);
                }
                else {
                    left.add(key, (axis + 1) % key.length, value);
                }
            }
            else if (right == null) {
                right = new Node(key, value);
            } else {
                right.add(key, (axis + 1) % key.length, value);
            }
        }

        /** @return true when every dimension of this node's key equals the given key. */
        public boolean keyEquals(final K[] key) {
            for (int i = 0; i < key.length; i++) {
                if (this.key[i].compareTo(key[i]) != 0) {
                    return false;
                }
            }
            return true;
        }

        /** Exact-match search; returns null when the key is not in this subtree. */
        public Node find(final K[] key, final int axis) {
            final int cmp = key[axis].compareTo(this.key[axis]);
            if (cmp == 0) {
                if (keyEquals(key)) {
                    // If this is totally equal.
                    return this;
                }
                else if (left == null) {
                    return null;
                }
                else {
                    // Equal on this axis but not overall: duplicates live left.
                    return left.find(key, (axis + 1) % key.length);
                }
            }
            else if (cmp < 0) {
                if (left == null) {
                    return null;
                }
                else {
                    return left.find(key, (axis + 1) % key.length);
                }
            }
            else {
                if (right == null) {
                    return null;
                }
                else {
                    return right.find(key, (axis + 1) % key.length);
                }
            }
        }

        /** Like find(), but returns the last node on the search path instead of null. */
        public Node findClosest(final K[] key, final int axis) {
            final int cmp = key[axis].compareTo(this.key[axis]);
            if (cmp == 0) {
                if (keyEquals(key)) {
                    // If this is totally equal.
                    return this;
                }
                else if (left == null) {
                    return this;
                }
                else {
                    return left.findClosest(key, (axis + 1) % key.length);
                }
            }
            else if (cmp < 0) {
                if (left == null) {
                    return this;
                }
                else {
                    return left.findClosest(key, (axis + 1) % key.length);
                }
            }
            else {
                if (right == null) {
                    return this;
                }
                else {
                    return right.findClosest(key, (axis + 1) % key.length);
                }
            }
        }

        /** @return The left-most node of this subtree. */
        public Node min() {
            Node n = this;
            while (n.left != null) {
                n = n.left;
            }
            return n;
        }

        /** @return The right-most node of this subtree. */
        public Node max() {
            Node n = this;
            while (n.right != null) {
                n = n.right;
            }
            return n;
        }

        /**
         * Remove the right-most node from this tree, ignoring the root.
         *
         * @return The removed node or null if there are no adequate child nodes.
         */
        public Node removeMaxChild() {
            if (this.right == null) {
                return null;
            }
            Node p = this;
            Node c = this.right;
            while (c.right != null) {
                p = c;
                c = c.right;
            }
            if (c.left != null) {
                // The removed node has a left subtree; splice a replacement in.
                final Node replacementNode = c.left.removeMaxChild();
                if (replacementNode != null) {
                    p.right = replacementNode;
                    replacementNode.left = c.left;
                }
                else {
                    p.right = c.left;
                }
            }
            else {
                p.right = null;
            }
            c.right = null;
            c.left = null;
            return c;
        }

        /**
         * Remove the left-most node from this tree, ignoring the root.
         *
         * @return The removed node or null if there are no adequate child nodes.
         */
        public Node removeMinChild() {
            if (this.left == null) {
                return null;
            }
            Node p = this;
            Node c = this.left;
            while (c.left != null) {
                p = c;
                c = c.left;
            }
            if (c.right != null) {
                // The removed node has a right subtree; splice a replacement in.
                final Node replacementNode = c.right.removeMinChild();
                if (replacementNode != null) {
                    p.left = replacementNode;
                    replacementNode.right = c.right;
                }
                else {
                    p.left = c.right;
                }
            }
            else {
                p.left = null;
            }
            c.right = null;
            c.left = null;
            return c;
        }

        /**
         * Remove the right node from this node and return it.
         *
         * @return The removed node or null.
         */
        public Node removeRightChild() {
            if (right == null) {
                return null;
            }
            final Node removed = right;
            final Node replacement = right.removeMinChild();
            if (replacement != null) {
                replacement.right = removed.right;
                right = replacement;
            }
            else {
                right = removed.right;
            }
            removed.left = null;
            removed.right = null;
            return removed;
        }

        /**
         * Remove the left node from this node and return it.
         *
         * @return The removed node or null.
         */
        public Node removeLeftChild() {
            if (left == null) {
                return null;
            }
            final Node removed = left;
            final Node replacement = left.removeMaxChild();
            if (replacement != null) {
                replacement.left = removed.left;
                left = replacement;
            }
            else {
                left = removed.left;
            }
            removed.left = null;
            removed.right = null;
            return removed;
        }

        /**
         * Find the given key in this subtree and remove the node associated with it.
         *
         * If parent is null and {@code this} is returned, no removal was done.
         * Callers of this may use this behavior to determine if they must remove
         * the root node of a tree.
         *
         * @param parent The parent node, or null if this node is the root of a tree.
         * @param key The key to identify the node by.
         * @param axis The axis we are considering. This starts at 0.
         * @return The node holding the key. If parent == null the node is not removed.
         */
        public Node remove(final Node parent, final K[] key, final int axis) {
            final int cmp = key[axis].compareTo(this.key[axis]);
            if (cmp == 0) {
                if (keyEquals(key)) {
                    // Remove this node from its parent.
                    if (parent == null) {
                        return this;
                    }
                    else if (parent.left == this) {
                        return parent.removeLeftChild();
                    }
                    else {
                        return parent.removeRightChild();
                    }
                }
                else if (left == null) {
                    return null;
                }
                else {
                    return left.remove(this, key, (axis + 1) % key.length);
                }
            }
            else if (cmp < 0) {
                if (left == null) {
                    return null;
                }
                else {
                    return left.remove(this, key, (axis + 1) % key.length);
                }
            }
            else {
                if (right == null) {
                    return null;
                }
                else {
                    return right.remove(this, key, (axis + 1) % key.length);
                }
            }
        }

        /** Breadth-first traversal of the subtree rooted at this node. */
        @Override
        public Iterator<Node> iterator() {
            final List<Node> queue = new ArrayList<>();
            // FIX: seed from this node, not the outer tree's head, so iterating
            // any node walks its own subtree.
            queue.add(this);
            return new Iterator<Node>() {
                @Override
                public boolean hasNext() {
                    return ! queue.isEmpty();
                }
                @Override
                public Node next() {
                    if (queue.isEmpty()) {
                        throw new java.util.NoSuchElementException();
                    }
                    // FIX: consume the node. The original only peeked
                    // (nodes.get(0)), so the iterator never advanced and the
                    // backing list grew without bound.
                    final Node n = queue.remove(0);
                    if (n.left != null) {
                        queue.add(n.left);
                    }
                    if (n.right != null) {
                        queue.add(n.right);
                    }
                    return n;
                }
            };
        }
    }
}
|
|
/**
* Copyright Ambud Sharma
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.srotya.sidewinder.core.api.grafana;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.codahale.metrics.Timer.Context;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import com.srotya.sidewinder.core.api.DatabaseOpsApi;
import com.srotya.sidewinder.core.functions.list.FunctionTable;
import com.srotya.sidewinder.core.monitoring.MetricsRegistryService;
import com.srotya.sidewinder.core.storage.ItemNotFoundException;
import com.srotya.sidewinder.core.storage.RejectException;
import com.srotya.sidewinder.core.storage.StorageEngine;
import com.srotya.sidewinder.core.utils.InvalidFilterException;
/**
* API specifically for designed for Grafana Sidewinder Datasource. This API is
* currently NOT REST compliant and is designed to be purely functional.
*
* @author ambud
*/
@Path("/{" + DatabaseOpsApi.DB_NAME + "}")
public class GrafanaQueryApiv1 {
// Shared JUL logger for this API resource.
private static final Logger logger = Logger.getLogger(GrafanaQueryApiv1.class.getName());
// Storage engine that all queries are executed against.
private StorageEngine engine;
// Server default time zone; used in queryData to shift Grafana timestamps.
private TimeZone tz;
// Dropwizard metrics: query rate and end-to-end query latency.
private Meter grafanaQueryCounter;
private Timer grafanaQueryLatency;
/**
 * Wires the resource to a storage engine and registers its metrics under the
 * "grafana" registry.
 *
 * @param engine backing storage engine used by all endpoints
 * @throws SQLException declared but no SQL work is visible in this
 *         constructor — NOTE(review): confirm why it is declared.
 */
public GrafanaQueryApiv1(StorageEngine engine) throws SQLException {
this.engine = engine;
tz = TimeZone.getDefault();
MetricRegistry registry = MetricsRegistryService.getInstance().getInstance("grafana");
grafanaQueryCounter = registry.meter("queries");
grafanaQueryLatency = registry.timer("latency");
}
@Path("/hc")
@GET
public String getHealth(@PathParam(DatabaseOpsApi.DB_NAME) String dbName) throws Exception {
    // Health check endpoint: succeeds only when the named database exists.
    logger.fine("Checking db name:" + dbName);
    if (!engine.checkIfExists(dbName)) {
        throw new NotFoundException("Database:" + dbName + " doesn't exist");
    }
    return "true";
}
@Path("/query")
@POST
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
/**
 * Executes a Grafana panel query: parses the request's time range and targets,
 * fetches data for each target and returns the series sorted by name (stable
 * ordering keeps series colors consistent across refreshes).
 *
 * @param dbName database to query
 * @param queryString Grafana query JSON (must contain "range" and targets)
 * @return sorted list of series outputs
 * @throws ParseException when the range timestamps cannot be parsed
 */
public List<GrafanaOutput> queryData(@PathParam(DatabaseOpsApi.DB_NAME) String dbName, String queryString)
		throws ParseException {
	grafanaQueryCounter.mark();
	List<GrafanaOutput> output = new ArrayList<>();
	Context time = grafanaQueryLatency.time();
	try {
		Gson gson = new GsonBuilder().setPrettyPrinting().create();
		logger.log(Level.FINE,
				() -> "Grafana query:" + dbName + "\t" + gson.toJson(gson.fromJson(queryString, JsonObject.class)));
		JsonObject json = gson.fromJson(queryString, JsonObject.class);
		// NOTE(review): the pattern treats the trailing 'Z' as a literal and the
		// formatter is not pinned to UTC, so the parsed values are local times;
		// the tz.getOffset() additions below compensate — confirm this matches
		// the timestamps Grafana actually sends.
		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
		JsonObject range = json.get("range").getAsJsonObject();
		long startTs = sdf.parse(range.get("from").getAsString()).getTime();
		long endTs = sdf.parse(range.get("to").getAsString()).getTime();
		startTs = tz.getOffset(startTs) + startTs;
		endTs = tz.getOffset(endTs) + endTs;
		List<TargetSeries> targetSeries = new ArrayList<>();
		try {
			GrafanaUtils.extractTargetsFromJson(json, targetSeries);
		} catch (InvalidFilterException e) {
			throw new BadRequestException(e.getMessage());
		}
		logger.log(Level.FINE,
				"Extracted targets from query json, target count:" + targetSeries.size() + " " + new Date(startTs));
		for (TargetSeries targetSeriesEntry : targetSeries) {
			logger.log(Level.FINE, () -> "Running grafana query fetch for:" + targetSeriesEntry);
			try {
				GrafanaUtils.queryAndGetData(engine, dbName, startTs, endTs, output, targetSeriesEntry);
			} catch (IOException e) {
				throw new InternalServerErrorException(e);
			}
		}
	} finally {
		// FIX: stop the timer even when parsing or fetching throws, so failed
		// queries still contribute to the latency metric (previously the
		// context leaked on every exception path).
		time.stop();
	}
	// Adding sorted output so series colors do not change in grafana
	Collections.sort(output);
	logger.log(Level.FINEST, () -> {
		StringBuilder builder = new StringBuilder();
		for (GrafanaOutput out : output) {
			builder.append(out.getTarget() + " " + out.getDatapoints().size() + "\n");
		}
		return builder.toString();
	});
	logger.log(Level.FINER, () -> "Grafana query result size:" + output.size());
	return output;
}
@Path("/query/measurements")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
/**
 * Resolves measurement-related names for Grafana's template/autocomplete box.
 * The "target" prefix selects the lookup: "measurement:" returns tag keys,
 * "field:" returns field names, anything else is a measurement-name search.
 * An empty body lists all measurements.
 */
public Set<String> queryMeasurementNames(@PathParam(DatabaseOpsApi.DB_NAME) String dbName, String queryString) {
	logger.log(Level.FINE, () -> "Query measurements for db:" + dbName + "\t" + queryString);
	try {
		if (queryString != null && !queryString.isEmpty()) {
			JsonObject query = new Gson().fromJson(queryString, JsonObject.class);
			if (query.has("target")) {
				String target = query.get("target").getAsString();
				if (target.startsWith("measurement:")) {
					return engine.getTagKeysForMeasurement(dbName, target.replace("measurement:", ""));
				} else if (target.contains("field:")) {
					return engine.getFieldsForMeasurement(dbName, target.replace("field:", ""));
				} else {
					return engine.getMeasurementsLike(dbName, target);
				}
			}
		}
		return engine.getMeasurementsLike(dbName, "");
	} catch (RejectException e) {
		throw new BadRequestException(e);
	} catch (Exception e) {
		// FIX: log through the JUL logger instead of printStackTrace(), and keep
		// the original exception as the cause instead of dropping it.
		logger.log(Level.SEVERE, "Measurement name query failed for db:" + dbName, e);
		throw new InternalServerErrorException(e.getMessage(), e);
	}
}
@Path("/query/tags")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
/**
 * Returns the tag keys of the measurement named by the request body's
 * "target" attribute. 400 on an empty body, 404 when "target" is missing.
 */
public Set<String> queryTagKeys(@PathParam(DatabaseOpsApi.DB_NAME) String dbName, String queryString) {
	logger.log(Level.FINE, () -> "Query tags for db:" + dbName + "\t" + queryString);
	if (queryString == null || queryString.trim().isEmpty()) {
		throw new BadRequestException();
	}
	try {
		Gson gson = new Gson();
		JsonObject measurement = gson.fromJson(queryString, JsonObject.class);
		if (!measurement.has("target")) {
			throw new ItemNotFoundException("Bad request");
		}
		return engine.getTagKeysForMeasurement(dbName, measurement.get("target").getAsString());
	} catch (ItemNotFoundException e) {
		throw new NotFoundException(e.getMessage());
	} catch (Exception e) {
		// FIX: log through the JUL logger instead of printStackTrace(), and keep
		// the original exception as the cause instead of dropping it.
		logger.log(Level.SEVERE, "Tag key query failed for db:" + dbName, e);
		throw new InternalServerErrorException(e.getMessage(), e);
	}
}
@Path("/query/tagvs")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
/**
 * Returns the values of one tag ("tag" attribute) for the measurement named
 * by the "target" attribute. 400 on an empty body, 404 when "target" is
 * missing.
 */
public Set<String> queryTagValues(@PathParam(DatabaseOpsApi.DB_NAME) String dbName, String queryString) {
	logger.log(Level.FINE, () -> "Query tag values for db:" + dbName + "\t" + queryString);
	if (queryString == null || queryString.trim().isEmpty()) {
		throw new BadRequestException();
	}
	try {
		Gson gson = new Gson();
		JsonObject measurement = gson.fromJson(queryString, JsonObject.class);
		if (!measurement.has("target")) {
			throw new ItemNotFoundException("Bad request");
		}
		// NOTE(review): a missing "tag" attribute NPEs here and surfaces as a
		// 500; confirm whether it should be a 400 instead.
		return engine.getTagValuesForMeasurement(dbName, measurement.get("target").getAsString(),
				measurement.get("tag").getAsString());
	} catch (ItemNotFoundException e) {
		throw new NotFoundException(e.getMessage());
	} catch (Exception e) {
		// FIX: log through the JUL logger instead of printStackTrace(), and keep
		// the original exception as the cause instead of dropping it.
		logger.log(Level.SEVERE, "Tag value query failed for db:" + dbName, e);
		throw new InternalServerErrorException(e.getMessage(), e);
	}
}
/**
 * Returns the field names of the measurement named in the request body's
 * {@code target} field.
 *
 * @param dbName      database to query (from the path)
 * @param queryString JSON request body
 * @return field names for the measurement
 * @throws NotFoundException            if the body has no {@code target} field
 * @throws InternalServerErrorException on any other failure
 */
@Path("/query/fields")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
public Set<String> queryFields(@PathParam(DatabaseOpsApi.DB_NAME) String dbName, String queryString) {
	try {
		Gson gson = new Gson();
		JsonObject measurement = gson.fromJson(queryString, JsonObject.class);
		if (measurement.has("target")) {
			Set<String> response = engine.getFieldsForMeasurement(dbName, measurement.get("target").getAsString());
			logger.log(Level.FINE, () -> "Query fields for db:" + dbName + "\t" + response + "\t" + queryString);
			return response;
		} else {
			throw new ItemNotFoundException("Bad request");
		}
	} catch (ItemNotFoundException e) {
		throw new NotFoundException(e.getMessage());
	} catch (Exception e) {
		// Previously the stack trace was silently dropped (only the message was
		// rethrown); log it so the failure is diagnosable.
		logger.log(Level.SEVERE, "Failed to query fields for db:" + dbName, e);
		throw new InternalServerErrorException(e.getMessage());
	}
}
/**
 * Lists the boolean condition types accepted by the query language.
 *
 * @return the set {@code {"AND", "OR"}}
 */
@Path("/query/ctypes")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
public Set<String> queryConditionTypes() {
	Set<String> conditionTypes = new HashSet<>();
	conditionTypes.add("AND");
	conditionTypes.add("OR");
	return conditionTypes;
}
/**
 * Lists the comparison operators accepted by the query language.
 *
 * @return the set of operator tokens: =, &gt;, &lt;, &gt;=, &lt;=, ~
 */
@Path("/query/otypes")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
public Set<String> queryOperatorTypes() {
	Set<String> operators = new HashSet<>();
	operators.add("=");
	operators.add(">");
	operators.add("<");
	operators.add(">=");
	operators.add("<=");
	operators.add("~");
	return operators;
}
/**
 * Lists the aggregation functions registered with the engine's function table.
 *
 * @return names of the registered aggregator functions
 */
@Path("/query/aggregators")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
public Set<String> queryAggregators() {
	// Delegate to the global function registry.
	Set<String> functions = FunctionTable.get().listFunctions();
	return functions;
}
/**
 * Lists the time-unit keywords accepted in range expressions.
 *
 * @return the supported unit names, from seconds to years
 */
@Path("/query/units")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
public Set<String> queryTimeUnits() {
	Set<String> units = new HashSet<>();
	units.add("secs");
	units.add("mins");
	units.add("hours");
	units.add("days");
	units.add("weeks");
	units.add("months");
	units.add("years");
	return units;
}
// @Path("/rawquery")
// @POST
// @Produces({ MediaType.APPLICATION_JSON })
// @Consumes({ MediaType.APPLICATION_JSON })
// public List<Target> rawQuery(@PathParam(DatabaseOpsApi.DB_NAME) String
// dbName, String query) throws ParseException {
// grafanaQueryCounter.mark();
// Context time = grafanaQueryLatency.time();
// Gson gson = new GsonBuilder().create();
// JsonObject json = gson.fromJson(query, JsonObject.class);
// SimpleDateFormat sdf = new
// SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
// JsonObject range = json.get("range").getAsJsonObject();
// long startTs = sdf.parse(range.get("from").getAsString()).getTime();
// long endTs = sdf.parse(range.get("to").getAsString()).getTime();
//
// startTs = tz.getOffset(startTs) + startTs;
// endTs = tz.getOffset(endTs) + endTs;
//
// List<TargetSeries> targetSeries = new ArrayList<>();
//
// List<Target> output = new ArrayList<>();
// try {
// for (TargetSeries targetSeriesEntry : targetSeries) {
// GrafanaUtils.queryAndGetData(engine, dbName, startTs, endTs, output,
// targetSeriesEntry);
// }
// } catch (Exception e) {
// e.printStackTrace();
// }
// time.stop();
// return output;
// }
}
|
|
package crazypants.enderio.integration.waila;
import java.util.List;
import java.util.Locale;
import com.enderio.core.api.client.gui.IAdvancedTooltipProvider;
import com.enderio.core.api.client.gui.IResourceTooltipProvider;
import com.enderio.core.client.handlers.SpecialTooltipHandler;
import crazypants.enderio.BlockEio;
import crazypants.enderio.EnderIO;
import crazypants.enderio.TileEntityEio;
import crazypants.enderio.block.BlockDarkSteelAnvil;
import crazypants.enderio.conduit.IConduitBundle;
import crazypants.enderio.conduit.liquid.AbstractTankConduit;
import crazypants.enderio.conduit.power.IPowerConduit;
import crazypants.enderio.fluid.Fluids;
import crazypants.enderio.machine.IIoConfigurable;
import crazypants.enderio.machine.IoMode;
import crazypants.enderio.machine.capbank.TileCapBank;
import crazypants.enderio.machine.invpanel.TileInventoryPanel;
import crazypants.enderio.machine.painter.blocks.BlockPaintedPressurePlate;
import crazypants.enderio.paint.IPaintable;
import crazypants.enderio.paint.IPaintable.IBlockPaintableBlock;
import crazypants.enderio.paint.YetaUtil;
import crazypants.enderio.power.IInternalPoweredTile;
import crazypants.enderio.power.PowerDisplayUtil;
import mcp.mobius.waila.api.ITaggedList;
import mcp.mobius.waila.api.IWailaConfigHandler;
import mcp.mobius.waila.api.IWailaDataAccessor;
import mcp.mobius.waila.api.IWailaDataProvider;
import mcp.mobius.waila.api.IWailaRegistrar;
import mcp.mobius.waila.api.impl.ConfigHandler;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.world.World;
import net.minecraft.world.chunk.IChunkProvider;
import net.minecraftforge.fluids.FluidStack;
import static crazypants.enderio.ModObject.itemLiquidConduit;
import static crazypants.enderio.ModObject.itemPowerConduit;
import static crazypants.enderio.integration.waila.IWailaInfoProvider.BIT_BASIC;
import static crazypants.enderio.integration.waila.IWailaInfoProvider.BIT_COMMON;
import static crazypants.enderio.integration.waila.IWailaInfoProvider.BIT_DETAILED;
/**
 * WAILA ("What Am I Looking At") HUD integration for Ender IO.
 * <p>
 * Registered via {@link #load(IWailaRegistrar)}; supplies the displayed item
 * stack (honoring facade hiding), the HUD body lines (IO mode, tooltips,
 * power/fluid info), and the server-to-client NBT payload for tile entities.
 */
public class WailaCompat implements IWailaDataProvider {

  /**
   * World view in which painted Ender IO blocks report their paint source
   * instead of themselves, so {@code getPickBlock} on the paint source sees a
   * consistent world. Only block-state/tile-entity lookup is meaningful; chunk
   * and entity access are stubbed out.
   * NOTE(review): non-static inner class — keeps a hidden reference to the
   * enclosing WailaCompat instance.
   */
  private class WailaWorldWrapper extends World {

    // The real world every lookup is forwarded to.
    private final World wrapped;

    private WailaWorldWrapper(World wrapped) {
      //super(wrapped.getSaveHandler(), wrapped.getWorldInfo().getWorldName(), wrapped.provider, new WorldSettings(wrapped.getWorldInfo()), wrapped.theProfiler);
      super(wrapped.getSaveHandler(), wrapped.getWorldInfo(), wrapped.provider, wrapped.theProfiler, wrapped.isRemote);
      this.wrapped = wrapped;
    }

    /** Returns the paint source for paintable blocks, the real state otherwise. */
    @Override
    public IBlockState getBlockState(BlockPos pos) {
      IBlockState bs = wrapped.getBlockState(pos);
      Block block = bs.getBlock();
      if (block instanceof IPaintable.IBlockPaintableBlock) {
        return ((IPaintable.IBlockPaintableBlock) block).getPaintSource(bs, wrapped, pos);
      }
      return bs;
    }

    /**
     * Creates a fresh tile entity for the (possibly painted) block state at
     * pos — deliberately NOT the live tile entity of the wrapped world.
     */
    @Override
    public TileEntity getTileEntity(BlockPos pos) {
      IBlockState bs = getBlockState(pos);
      Block block = bs.getBlock();
      if(block == null || !block.hasTileEntity(bs)) {
        return null;
      }
      TileEntity te = block.createTileEntity(this, bs);
      if(te == null) {
        return null;
      }
      te.setWorldObj(this);
      te.setPos(pos);
      return te;
    }

    // No chunk generation in this fake world.
    @Override
    protected IChunkProvider createChunkProvider() {
      return null;
    }

    // Entities are not represented in this fake world.
    @Override
    public Entity getEntityByID(int p_73045_1_) {
      return null;
    }

    // Pretend everything is loaded so lookups never trigger chunk loading.
    @Override
    protected boolean isChunkLoaded(int x, int z, boolean allowEmpty) {
      return true;
    }
  }

  /** Singleton registered with WAILA. */
  public static final WailaCompat INSTANCE = new WailaCompat();

  // Last accessor seen by getWailaBody(); read by the static getNBTData()
  // helper and by getWailaBodyConduitBundle().
  private static IWailaDataAccessor _accessor = null;

  /** Called by WAILA to register this provider and its config option. */
  public static void load(IWailaRegistrar registrar) {
    registrar.registerStackProvider(INSTANCE, BlockDarkSteelAnvil.class);
    registrar.registerBodyProvider(INSTANCE, BlockEio.class);
    registrar.registerBodyProvider(INSTANCE, BlockPaintedPressurePlate.class);
    registrar.registerNBTProvider(INSTANCE, TileEntityEio.class);
    ConfigHandler.instance().addConfig(EnderIO.MOD_NAME, "facades.hidden", EnderIO.lang.localize("waila.config.hiddenfacades"));
  }

  /**
   * Chooses the item stack WAILA displays. With "facades.hidden" enabled,
   * painted blocks show their paint source's pick block (unless the facade is
   * yeta-hidden for this player); dark steel anvils always show their own
   * pick block. Returns null to fall back to WAILA's default.
   */
  @Override
  public ItemStack getWailaStack(IWailaDataAccessor accessor, IWailaConfigHandler config) {
    BlockPos pos = accessor.getPosition();
    if(config.getConfig("facades.hidden")) {
      if (accessor.getBlock() instanceof IBlockPaintableBlock) {
        // If facades are hidden, we need to ignore it
        if(accessor.getTileEntity() instanceof IConduitBundle && YetaUtil.isFacadeHidden((IConduitBundle) accessor.getTileEntity(), accessor.getPlayer())) {
          return null;
        }
        IBlockPaintableBlock bundle = (IBlockPaintableBlock) accessor.getBlock();
        IBlockState facade = bundle.getPaintSource(accessor.getBlockState(), accessor.getWorld(), pos);
        if(facade != null && facade.getBlock() != accessor.getBlock()) {
          // Ask the paint source for its pick block inside the wrapped world
          // so it sees itself (not the painted carrier) at this position.
          ItemStack ret = facade.getBlock().getPickBlock(facade, accessor.getMOP(), new WailaWorldWrapper(accessor.getWorld()), pos, accessor.getPlayer());
          return ret;
        }
      }
    } else if(accessor.getBlock() instanceof BlockDarkSteelAnvil) {
      return accessor.getBlock().getPickBlock(accessor.getBlockState(), accessor.getMOP(), accessor.getWorld(), accessor.getPosition(), accessor.getPlayer());
    }
    return null;
  }

  /** No changes to the HUD head line. */
  @Override
  public List<String> getWailaHead(ItemStack itemStack, List<String> currenttip, IWailaDataAccessor accessor, IWailaConfigHandler config) {
    return currenttip;
  }

  /**
   * Builds the HUD body: IO-mode info for configurable machines, block/item
   * tooltips, and power or fluid status lines. Also stashes the accessor into
   * the static {@link #_accessor} for the conduit helper below.
   */
  @SuppressWarnings("unchecked")
  @Override
  public List<String> getWailaBody(ItemStack itemStack, List<String> currenttip, IWailaDataAccessor accessor, IWailaConfigHandler config) {
    _accessor = accessor;
    EntityPlayer player = accessor.getPlayer();
    BlockPos pos = accessor.getPosition();
    World world = accessor.getWorld();
    IBlockState bs = world.getBlockState(pos);
    Block block = bs.getBlock();
    TileEntity te = world.getTileEntity(pos);
    Item item = Item.getItemFromBlock(block);
    // let's get rid of WAILA's default RF stuff, only supported on WAILA 1.5.9+
    ((ITaggedList<String, String>) currenttip).removeEntries("RFEnergyStorage");
    // Side/IO-mode lines, but only when looking at the machine itself
    // (block == accessor.getBlock() excludes painted facades).
    if(te instanceof IIoConfigurable && block == accessor.getBlock()) {
      IIoConfigurable machine = (IIoConfigurable) te;
      EnumFacing side = accessor.getSide();
      IoMode mode = machine.getIoMode(side);
      currenttip.add(TextFormatting.YELLOW
          + EnderIO.lang.localize("gui.machine.side", TextFormatting.WHITE + EnderIO.lang.localize("gui.machine.side." + side.name().toLowerCase(Locale.US))));
      if(!(te instanceof TileInventoryPanel)) {
        currenttip.add(TextFormatting.YELLOW + EnderIO.lang.localize("gui.machine.ioMode", mode.colorLocalisedName()));
      }
    }
    if(block instanceof IWailaInfoProvider) {
      IWailaInfoProvider info = (IWailaInfoProvider) block;
      if(block instanceof IAdvancedTooltipProvider) {
        // The display mask selects which tooltip tiers this block wants shown.
        int mask = info.getDefaultDisplayMask(world, pos.getX(), pos.getY(), pos.getZ());
        boolean basic = (mask & BIT_BASIC) == BIT_BASIC;
        boolean common = (mask & BIT_COMMON) == BIT_COMMON;
        boolean detailed = (mask & BIT_DETAILED) == BIT_DETAILED;
        IAdvancedTooltipProvider adv = (IAdvancedTooltipProvider) block;
        if(common) {
          adv.addCommonEntries(itemStack, player, currenttip, false);
        }
        if(SpecialTooltipHandler.showAdvancedTooltips() && detailed) {
          adv.addDetailedEntries(itemStack, player, currenttip, false);
        } else if(detailed) { // show "<Hold Shift>"
          SpecialTooltipHandler.addShowDetailsTooltip(currenttip);
        }
        if(!SpecialTooltipHandler.showAdvancedTooltips() && basic) {
          adv.addBasicEntries(itemStack, player, currenttip, false);
        }
      } else if(block instanceof IResourceTooltipProvider) {
        SpecialTooltipHandler.INSTANCE.addInformation((IResourceTooltipProvider) block, itemStack, player, currenttip);
      }
      // Blank separator before the block's own WAILA lines.
      if(currenttip.size() > 0) {
        currenttip.add("");
      }
      info.getWailaInfo(currenttip, player, world, pos.getX(), pos.getY(), pos.getZ());
    }
    else {
      if(block instanceof IAdvancedTooltipProvider) {
        SpecialTooltipHandler.INSTANCE.addInformation((IAdvancedTooltipProvider) block, itemStack, player, currenttip, false);
      } else if(item instanceof IAdvancedTooltipProvider) {
        SpecialTooltipHandler.INSTANCE.addInformation((IAdvancedTooltipProvider) item, itemStack, player, currenttip, false);
      } else if(block instanceof IResourceTooltipProvider) {
        SpecialTooltipHandler.INSTANCE.addInformation((IResourceTooltipProvider) block, itemStack, player, currenttip);
      }
    }
    if(te instanceof IConduitBundle) {
      getWailaBodyConduitBundle(itemStack, currenttip);
    } else if(te instanceof IInternalPoweredTile && block == accessor.getBlock() && !(te instanceof TileCapBank)) {
      IInternalPoweredTile power = (IInternalPoweredTile) te;
      if(power.displayPower()) {
        if(currenttip.size() > 4) {
          currenttip.add("");
        }
        // Why do we dump the TEs state into NBT to get these values? We have the TE and could ask it directly.
        int stored = accessor.getNBTData().getInteger("storedEnergyRF");
        int max = accessor.getNBTData().getInteger("maxStoredRF");
        currenttip.add(String.format("%s%s%s / %s%s%s %s", TextFormatting.WHITE, PowerDisplayUtil.formatPower(stored), TextFormatting.RESET,
            TextFormatting.WHITE, PowerDisplayUtil.formatPower(max), TextFormatting.RESET, PowerDisplayUtil.abrevation()));
      }
    }
    return currenttip;
  }

  /**
   * Adds power (power conduit) or fluid (liquid conduit) status lines, read
   * from the NBT payload of the last accessor seen by getWailaBody().
   */
  private void getWailaBodyConduitBundle(ItemStack itemStack, List<String> currenttip) {
    if(itemStack == null) {
      return;
    }
    if (itemStack.getItem() == itemPowerConduit.getItem()) {
      NBTTagCompound nbtRoot = _accessor.getNBTData();
      if(nbtRoot.hasKey("storedEnergyRF")) {
        int stored = nbtRoot.getInteger("storedEnergyRF");
        int max = nbtRoot.getInteger("maxStoredRF");
        currenttip.add(String.format("%s%s%s / %s%s%s %s", TextFormatting.WHITE, PowerDisplayUtil.formatPower(stored), TextFormatting.RESET,
            TextFormatting.WHITE, PowerDisplayUtil.formatPower(max), TextFormatting.RESET, PowerDisplayUtil.abrevation()));
      }
      if(nbtRoot.hasKey("maxStoredRF")) {
        int max = nbtRoot.getInteger("maxStoredRF");
        currenttip.add(String.format("%s %s %s", "Max", PowerDisplayUtil.formatPower(max), PowerDisplayUtil.abrevation() + PowerDisplayUtil.perTickStr()));
      }
    } else if (itemStack.getItem() == itemLiquidConduit.getItem()) {
      NBTTagCompound nbtRoot = _accessor.getNBTData();
      if(nbtRoot.hasKey("fluidLocked") && nbtRoot.hasKey("FluidName")) {
        boolean fluidTypeLocked = nbtRoot.getBoolean("fluidLocked");
        FluidStack fluid = FluidStack.loadFluidStackFromNBT(nbtRoot);
        String lockedStr = fluidTypeLocked ? EnderIO.lang.localize("itemLiquidConduit.lockedWaila") : "";
        String fluidName = fluid.getLocalizedName();
        int fluidAmount = fluid.amount;
        if(fluidAmount > 0) {
          // NOTE: using PowerDisplayUtil.formatPower here to handle the non breaking space issue
          currenttip.add(String.format("%s%s%s%s %s%s%s %s", lockedStr,
              TextFormatting.WHITE, fluidName, TextFormatting.RESET,
              TextFormatting.WHITE, PowerDisplayUtil.formatPower(fluidAmount), TextFormatting.RESET,
              Fluids.MB()));
        } else if(fluidTypeLocked) {
          currenttip.add(String.format("%s%s%s%s", lockedStr,
              TextFormatting.WHITE, fluidName, TextFormatting.RESET));
        }
      }
    }
  }

  /** No changes to the HUD tail line. */
  @Override
  public List<String> getWailaTail(ItemStack itemStack, List<String> currenttip, IWailaDataAccessor accessor, IWailaConfigHandler config) {
    return currenttip;
  }

  /**
   * Server side: packs tile-entity state (custom data, conduit power/fluid,
   * powered-tile energy, position) into the NBT tag WAILA syncs to the client.
   */
  @Override
  public NBTTagCompound getNBTData(EntityPlayerMP player, TileEntity te, NBTTagCompound tag, World world, BlockPos pos) {
    if(te instanceof IWailaNBTProvider) {
      ((IWailaNBTProvider) te).getData(tag);
    }
    if(te instanceof IConduitBundle) {
      IConduitBundle icb = (IConduitBundle) te;
      IPowerConduit pc = icb.getConduit(IPowerConduit.class);
      if(pc != null ) {
        tag.setInteger("maxStoredRF", pc.getMaxEnergyStored(null));
        if(icb.displayPower()) {
          tag.setInteger("storedEnergyRF", pc.getEnergyStored(null));
        }
      }
      AbstractTankConduit atc = icb.getConduit(AbstractTankConduit.class);
      if(atc != null) {
        FluidStack fluid = atc.getTank().getFluid();
        if(fluid != null) {
          tag.setBoolean("fluidLocked", atc.isFluidTypeLocked());
          // Writes the "FluidName"/"Amount" keys read back on the client.
          fluid.writeToNBT(tag);
        }
      }
    } else if(te instanceof IInternalPoweredTile) {
      IInternalPoweredTile ipte = (IInternalPoweredTile) te;
      tag.setInteger("storedEnergyRF", ipte.getEnergyStored(null));
      tag.setInteger("maxStoredRF", ipte.getMaxEnergyStored(null));
    }
    tag.setInteger("x", pos.getX());
    tag.setInteger("y", pos.getY());
    tag.setInteger("z", pos.getZ());
    return tag;
  }

  /** NBT payload of the last accessor seen by getWailaBody(). */
  public static NBTTagCompound getNBTData() {
    return _accessor.getNBTData();
  }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.impl.pb.client;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodesToAttributesMappingRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshClusterMaxPriorityRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResourcesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto;
import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol;
import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocolPB;
import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.CheckForDecommissioningNodesRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.CheckForDecommissioningNodesResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodesToAttributesMappingRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodesToAttributesMappingResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshClusterMaxPriorityRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshClusterMaxPriorityResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesResourcesRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesResourcesResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshQueuesRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshQueuesResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshServiceAclsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshServiceAclsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsConfigurationRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsConfigurationResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.CheckForDecommissioningNodesRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.CheckForDecommissioningNodesResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.NodesToAttributesMappingRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.NodesToAttributesMappingResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshAdminAclsRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshAdminAclsResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshClusterMaxPriorityRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshClusterMaxPriorityResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesResourcesRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesResourcesResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshQueuesRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshQueuesResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshServiceAclsRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshServiceAclsResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl;
import com.google.protobuf.ServiceException;
/**
 * Client-side protobuf/RPC implementation of
 * {@link ResourceManagerAdministrationProtocol}.
 * <p>
 * Every method follows the same pattern: convert the request record to its
 * protobuf form, invoke the RPC proxy (the first, RPC-controller argument is
 * always {@code null} with {@link ProtobufRpcEngine}), and wrap the protobuf
 * response back into a record. {@link ServiceException}s from the RPC layer
 * are unwrapped into {@link YarnException}/{@link IOException} via
 * {@link RPCUtil#unwrapAndThrowException}; the {@code return null} after that
 * call is unreachable but required by the compiler.
 */
@Private
public class ResourceManagerAdministrationProtocolPBClientImpl implements ResourceManagerAdministrationProtocol, Closeable {

  /** RPC proxy to the ResourceManager admin service. */
  private ResourceManagerAdministrationProtocolPB proxy;

  /**
   * Creates the client and its RPC proxy.
   *
   * @param clientVersion protocol version expected by this client
   * @param addr ResourceManager admin service address
   * @param conf configuration used to set up the RPC engine
   * @throws IOException if the proxy cannot be created
   */
  public ResourceManagerAdministrationProtocolPBClientImpl(long clientVersion, InetSocketAddress addr,
      Configuration conf) throws IOException {
    RPC.setProtocolEngine(conf, ResourceManagerAdministrationProtocolPB.class,
        ProtobufRpcEngine.class);
    proxy = (ResourceManagerAdministrationProtocolPB) RPC.getProxy(
        ResourceManagerAdministrationProtocolPB.class, clientVersion, addr, conf);
  }

  /** Stops the underlying RPC proxy. */
  @Override
  public void close() {
    if (this.proxy != null) {
      RPC.stopProxy(this.proxy);
    }
  }

  /** Asks the RM to reload its queue configuration. */
  @Override
  public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request)
      throws YarnException, IOException {
    RefreshQueuesRequestProto requestProto =
        ((RefreshQueuesRequestPBImpl) request).getProto();
    try {
      return new RefreshQueuesResponsePBImpl(
          proxy.refreshQueues(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to re-read its include/exclude node lists. */
  @Override
  public RefreshNodesResponse refreshNodes(RefreshNodesRequest request)
      throws YarnException, IOException {
    RefreshNodesRequestProto requestProto =
        ((RefreshNodesRequestPBImpl) request).getProto();
    try {
      return new RefreshNodesResponsePBImpl(
          proxy.refreshNodes(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to reload the superuser-groups proxy configuration. */
  @Override
  public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration(
      RefreshSuperUserGroupsConfigurationRequest request)
      throws YarnException, IOException {
    RefreshSuperUserGroupsConfigurationRequestProto requestProto =
        ((RefreshSuperUserGroupsConfigurationRequestPBImpl) request).getProto();
    try {
      return new RefreshSuperUserGroupsConfigurationResponsePBImpl(
          proxy.refreshSuperUserGroupsConfiguration(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to reload its user-to-groups mapping. */
  @Override
  public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings(
      RefreshUserToGroupsMappingsRequest request) throws YarnException,
      IOException {
    RefreshUserToGroupsMappingsRequestProto requestProto =
        ((RefreshUserToGroupsMappingsRequestPBImpl) request).getProto();
    try {
      return new RefreshUserToGroupsMappingsResponsePBImpl(
          proxy.refreshUserToGroupsMappings(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to reload its admin ACLs. */
  @Override
  public RefreshAdminAclsResponse refreshAdminAcls(
      RefreshAdminAclsRequest request) throws YarnException, IOException {
    RefreshAdminAclsRequestProto requestProto =
        ((RefreshAdminAclsRequestPBImpl) request).getProto();
    try {
      return new RefreshAdminAclsResponsePBImpl(
          proxy.refreshAdminAcls(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to reload its service-level ACLs. */
  @Override
  public RefreshServiceAclsResponse refreshServiceAcls(
      RefreshServiceAclsRequest request) throws YarnException,
      IOException {
    RefreshServiceAclsRequestProto requestProto =
        ((RefreshServiceAclsRequestPBImpl) request).getProto();
    try {
      return new RefreshServiceAclsResponsePBImpl(proxy.refreshServiceAcls(
          null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /**
   * Looks up the groups of a user as seen by the RM.
   *
   * @param user user name to resolve
   * @return the user's group names
   * @throws IOException if the RPC fails (remote cause unwrapped)
   */
  @Override
  public String[] getGroupsForUser(String user) throws IOException {
    GetGroupsForUserRequestProto requestProto =
        GetGroupsForUserRequestProto.newBuilder().setUser(user).build();
    try {
      GetGroupsForUserResponseProto responseProto =
          proxy.getGroupsForUser(null, requestProto);
      // toArray(String[]) already returns String[]; the old explicit cast
      // was redundant.
      return responseProto.getGroupsList().toArray(
          new String[responseProto.getGroupsCount()]);
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
  }

  /** Updates the resource capability of a node. */
  @Override
  public UpdateNodeResourceResponse updateNodeResource(
      UpdateNodeResourceRequest request) throws YarnException, IOException {
    UpdateNodeResourceRequestProto requestProto =
        ((UpdateNodeResourceRequestPBImpl) request).getProto();
    try {
      return new UpdateNodeResourceResponsePBImpl(proxy.updateNodeResource(null,
          requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to re-read per-node resource overrides. */
  @Override
  public RefreshNodesResourcesResponse refreshNodesResources(
      RefreshNodesResourcesRequest request) throws YarnException, IOException {
    RefreshNodesResourcesRequestProto requestProto =
        ((RefreshNodesResourcesRequestPBImpl) request).getProto();
    try {
      return new RefreshNodesResourcesResponsePBImpl(
          proxy.refreshNodesResources(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Adds labels to the cluster's node-label store. */
  @Override
  public AddToClusterNodeLabelsResponse addToClusterNodeLabels(
      AddToClusterNodeLabelsRequest request) throws YarnException, IOException {
    AddToClusterNodeLabelsRequestProto requestProto =
        ((AddToClusterNodeLabelsRequestPBImpl) request).getProto();
    try {
      return new AddToClusterNodeLabelsResponsePBImpl(
          proxy.addToClusterNodeLabels(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Removes labels from the cluster's node-label store. */
  @Override
  public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels(
      RemoveFromClusterNodeLabelsRequest request) throws YarnException,
      IOException {
    RemoveFromClusterNodeLabelsRequestProto requestProto =
        ((RemoveFromClusterNodeLabelsRequestPBImpl) request).getProto();
    try {
      return new RemoveFromClusterNodeLabelsResponsePBImpl(
          proxy.removeFromClusterNodeLabels(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Replaces the labels on a set of nodes (proto RPC name is plural). */
  @Override
  public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
      ReplaceLabelsOnNodeRequest request) throws YarnException, IOException {
    ReplaceLabelsOnNodeRequestProto requestProto =
        ((ReplaceLabelsOnNodeRequestPBImpl) request).getProto();
    try {
      return new ReplaceLabelsOnNodeResponsePBImpl(proxy.replaceLabelsOnNodes(
          null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Queries the RM for nodes currently being decommissioned. */
  @Override
  public CheckForDecommissioningNodesResponse checkForDecommissioningNodes(
      CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest)
      throws YarnException, IOException {
    CheckForDecommissioningNodesRequestProto requestProto =
        ((CheckForDecommissioningNodesRequestPBImpl) checkForDecommissioningNodesRequest)
            .getProto();
    try {
      return new CheckForDecommissioningNodesResponsePBImpl(
          proxy.checkForDecommissioningNodes(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Asks the RM to re-read the cluster's maximum application priority. */
  @Override
  public RefreshClusterMaxPriorityResponse refreshClusterMaxPriority(
      RefreshClusterMaxPriorityRequest request) throws YarnException,
      IOException {
    RefreshClusterMaxPriorityRequestProto requestProto =
        ((RefreshClusterMaxPriorityRequestPBImpl) request).getProto();
    try {
      return new RefreshClusterMaxPriorityResponsePBImpl(
          proxy.refreshClusterMaxPriority(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }

  /** Applies a node-to-attributes mapping on the RM. */
  @Override
  public NodesToAttributesMappingResponse mapAttributesToNodes(
      NodesToAttributesMappingRequest request)
      throws YarnException, IOException {
    NodesToAttributesMappingRequestProto requestProto =
        ((NodesToAttributesMappingRequestPBImpl) request).getProto();
    try {
      return new NodesToAttributesMappingResponsePBImpl(
          proxy.mapAttributesToNodes(null, requestProto));
    } catch (ServiceException e) {
      RPCUtil.unwrapAndThrowException(e);
      return null;
    }
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.orm.dao;
import java.sql.SQLException;
import java.util.UUID;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.H2DatabaseCleaner;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.orm.entities.StackEntity;
import org.apache.ambari.server.state.RepositoryType;
import org.apache.ambari.server.state.StackId;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.inject.Guice;
import com.google.inject.Injector;
/**
* RepositoryVersionDAO unit tests.
*/
public class RepositoryVersionDAOTest {

  // FIX: was a static field mutated from the per-test-instance @Before/@After
  // methods. JUnit creates a fresh test instance per test, so an instance field
  // is sufficient and avoids state accidentally leaking across test classes.
  private Injector injector;

  private static final StackId HDP_206 = new StackId("HDP", "2.0.6");
  private static final StackId OTHER_10 = new StackId("OTHER", "1.0");
  // Stack id that is not registered in the metainfo; used for negative lookups.
  private static final StackId BAD_STACK = new StackId("BADSTACK", "1.0");

  private RepositoryVersionDAO repositoryVersionDAO;
  private StackDAO stackDAO;

  /**
   * Builds a fresh in-memory persistence context and the DAOs under test.
   */
  @Before
  public void before() {
    injector = Guice.createInjector(new InMemoryDefaultTestModule());
    repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
    stackDAO = injector.getInstance(StackDAO.class);
    injector.getInstance(GuiceJpaInitializer.class);

    // required to populate stacks into the database
    injector.getInstance(AmbariMetaInfo.class);
  }

  /**
   * Persists a single repository version entity ("display name" / "version")
   * against the HDP 2.0.6 stack and returns it.
   */
  private RepositoryVersionEntity createSingleRecord() {
    StackEntity stackEntity = stackDAO.find(HDP_206.getStackName(),
        HDP_206.getStackVersion());

    Assert.assertNotNull(stackEntity);

    final RepositoryVersionEntity entity = new RepositoryVersionEntity();
    entity.setDisplayName("display name");
    entity.setOperatingSystems("repositories");
    entity.setStack(stackEntity);
    entity.setVersion("version");
    repositoryVersionDAO.create(entity);

    return entity;
  }

  /**
   * Verifies creation constraints: duplicate versions are rejected, versions
   * must belong to the stack, and a well-formed version succeeds.
   */
  @Test
  public void testCreate() {
    UUID uuid = UUID.randomUUID();

    RepositoryVersionEntity first = createSingleRecord();
    Assert.assertNotNull(first);

    StackEntity stackEntity = stackDAO.find(first.getStackName(), first.getStackVersion());
    Assert.assertNotNull(stackEntity);

    // Assert the version must be unique
    RepositoryVersionEntity dupVersion = new RepositoryVersionEntity();
    dupVersion.setDisplayName("display name " + uuid);
    dupVersion.setOperatingSystems("repositories");
    dupVersion.setStack(stackEntity);
    dupVersion.setVersion(first.getVersion());

    boolean exceptionThrown = false;
    try {
      repositoryVersionDAO.create(stackEntity, dupVersion.getVersion(), dupVersion.getDisplayName(), dupVersion.getOperatingSystemsJson());
    } catch (AmbariException e) {
      exceptionThrown = true;
      Assert.assertTrue(e.getMessage().contains("already exists"));
    }
    // Expected the exception to be thrown since the build version was reused in the second record.
    Assert.assertTrue(exceptionThrown);

    exceptionThrown = false;

    // The version must belong to the stack
    dupVersion.setVersion("2.3-1234");
    try {
      repositoryVersionDAO.create(stackEntity, dupVersion.getVersion(), dupVersion.getDisplayName(), dupVersion.getOperatingSystemsJson());
    } catch (AmbariException e) {
      exceptionThrown = true;
      Assert.assertTrue(e.getMessage().contains("needs to belong to stack"));
    }
    // Expected the exception to be thrown since the version does not belong to the stack.
    Assert.assertTrue(exceptionThrown);

    // Success
    dupVersion.setVersion(stackEntity.getStackVersion() + "-1234");
    try {
      repositoryVersionDAO.create(stackEntity, dupVersion.getVersion(), dupVersion.getDisplayName(), dupVersion.getOperatingSystemsJson());
    } catch (AmbariException e) {
      Assert.fail("Did not expect a failure creating the Repository Version");
    }
  }

  @Test
  public void testFindByDisplayName() {
    createSingleRecord();
    Assert.assertNull(repositoryVersionDAO.findByDisplayName("non existing"));
    Assert.assertNotNull(repositoryVersionDAO.findByDisplayName("display name"));
  }

  @Test
  public void testFindByStackAndVersion() {
    createSingleRecord();
    Assert.assertNull(repositoryVersionDAO.findByStackAndVersion(BAD_STACK,
        "non existing"));
    Assert.assertNotNull(repositoryVersionDAO.findByStackAndVersion(HDP_206,
        "version"));
  }

  @Test
  public void testFindByStack() {
    createSingleRecord();
    Assert.assertEquals(0, repositoryVersionDAO.findByStack(BAD_STACK).size());
    Assert.assertEquals(1, repositoryVersionDAO.findByStack(HDP_206).size());
  }

  @Test
  public void testDelete() {
    createSingleRecord();
    Assert.assertNotNull(repositoryVersionDAO.findByStackAndVersion(HDP_206,
        "version"));

    final RepositoryVersionEntity entity = repositoryVersionDAO.findByStackAndVersion(
        HDP_206, "version");

    repositoryVersionDAO.remove(entity);
    Assert.assertNull(repositoryVersionDAO.findByStackAndVersion(HDP_206,
        "version"));
  }

  /**
   * Verifies that a stack-name prefix (e.g. "HDP-") supplied in the version
   * string is stripped on create, while the display name is kept verbatim.
   */
  @Test
  public void testRemovePrefixFromVersion() {
    StackEntity hdp206StackEntity = stackDAO.find(HDP_206.getStackName(),
        HDP_206.getStackVersion());
    Assert.assertNotNull(hdp206StackEntity);

    final RepositoryVersionEntity hdp206RepoEntity = new RepositoryVersionEntity();
    hdp206RepoEntity.setDisplayName("HDP-2.0.6.0-1234");
    hdp206RepoEntity.setOperatingSystems("repositories");
    hdp206RepoEntity.setStack(hdp206StackEntity);
    hdp206RepoEntity.setVersion("HDP-2.0.6.0-1234");
    repositoryVersionDAO.create(hdp206RepoEntity);
    Assert.assertEquals("Failed to remove HDP stack prefix from version", "2.0.6.0-1234", hdp206RepoEntity.getVersion());
    Assert.assertNotNull(repositoryVersionDAO.findByDisplayName("HDP-2.0.6.0-1234"));
    Assert.assertNotNull(repositoryVersionDAO.findByStackAndVersion(HDP_206,
        "2.0.6.0-1234"));

    StackEntity other10StackEntity = stackDAO.find(OTHER_10.getStackName(),
        OTHER_10.getStackVersion());
    Assert.assertNotNull(other10StackEntity);

    final RepositoryVersionEntity other10RepoEntity = new RepositoryVersionEntity();
    other10RepoEntity.setDisplayName("OTHER-1.0.1.0-1234");
    other10RepoEntity.setOperatingSystems("repositories");
    other10RepoEntity.setStack(other10StackEntity);
    other10RepoEntity.setVersion("OTHER-1.0.1.0-1234");
    repositoryVersionDAO.create(other10RepoEntity);
    Assert.assertEquals("Failed to remove OTHER stack prefix from version", "1.0.1.0-1234", other10RepoEntity.getVersion());
    Assert.assertNotNull(repositoryVersionDAO.findByDisplayName("OTHER-1.0.1.0-1234"));
    Assert.assertNotNull(repositoryVersionDAO.findByStackAndVersion(OTHER_10,
        "1.0.1.0-1234"));
  }

  @Test
  public void testFindByStackAndType() {
    createSingleRecord();

    Assert.assertEquals(1,
        repositoryVersionDAO.findByStackAndType(HDP_206, RepositoryType.STANDARD).size());

    Assert.assertEquals(0,
        repositoryVersionDAO.findByStackAndType(HDP_206, RepositoryType.MAINT).size());
  }

  /**
   * Tears down the in-memory database so each test starts from a clean slate.
   */
  @After
  public void after() throws AmbariException, SQLException {
    H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
    injector = null;
  }
}
|
|
package chav1961.purelib.nanoservice;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import javax.sql.DataSource;
import chav1961.purelib.basic.SubstitutableProperties;
import chav1961.purelib.basic.Utils;
import chav1961.purelib.basic.exceptions.ContentException;
import chav1961.purelib.basic.exceptions.SyntaxException;
import chav1961.purelib.basic.interfaces.LoggerFacade;
import chav1961.purelib.basic.interfaces.LoggerFacade.Severity;
import chav1961.purelib.fsys.FileSystemFactory;
import chav1961.purelib.fsys.interfaces.FileSystemInterface;
import chav1961.purelib.nanoservice.interfaces.NanoService;
import chav1961.purelib.nanoservice.interfaces.RootPath;
/**
 * Watches a deployment directory (a {@link FileSystemInterface}) and keeps the
 * associated {@link NanoService} in sync with its content: *.jar and *.class
 * units found there are deployed, changed units are redeployed, and removed
 * units are undeployed. A scan runs once on construction and then, when a
 * positive period is configured, repeatedly on a daemon timer.
 */
class NanoServiceManager implements Closeable {
	// Configuration keys looked up in the SubstitutableProperties passed to the constructor.
	public static final String NANOSERVICE_DEPLOYMENT_DIR = "nanoserviceDeploymentDir";
	public static final String NANOSERVICE_DEPLOYMENT_CLASSPREFIX = "nanoserviceDeploymentClassPrefix";
	public static final String NANOSERVICE_DEPLOYMENT_PERIOD = "nanoserviceDeploymentPeriod";

	// Action computed for a unit by comparing the previous and the current directory snapshot.
	enum DeploymentMode {
		deploy, redeploy, undeploy
	}

	private final LoggerFacade facade;
	// May be null (see the 3-arg constructor); not read anywhere in this class.
	private final DataSource dataSource;
	private final NanoService factory;
	// Daemon timer driving periodic redeploy(); cancelled in close().
	private final Timer t = new Timer(true);
	private final FileSystemInterface deploymentRoot;
	// NOTE(review): field name looks like a typo of "deploymentClassPrefix" - internal only, rename candidate.
	private final String deplaymentClassPrefix;
	private final long deploymentPeriod;
	// Registry of currently deployed units, keyed by file-system path.
	private final Map<String,DeploymentDesc> deployed = new ConcurrentHashMap<>();
	// NOTE(review): assigned once here and never read or swapped afterwards in this
	// class (redeploy() builds its own loader) - confirm whether it is still needed.
	private URLClassLoader currentLoader = new AdvancedURLClassLoader(this.getClass().getClassLoader());

	/**
	 * Convenience constructor without a data source.
	 */
	public NanoServiceManager(final LoggerFacade facade, final SubstitutableProperties props, final NanoService factory) throws NullPointerException, IOException, ContentException, SyntaxException {
		this(facade,props,factory,null);
	}

	/**
	 * Validates the configuration, performs the initial deployment scan and,
	 * when NANOSERVICE_DEPLOYMENT_PERIOD > 0, schedules periodic rescans.
	 *
	 * @throws NullPointerException when facade, props or factory is null
	 * @throws IllegalArgumentException when mandatory configuration is missing
	 */
	public NanoServiceManager(final LoggerFacade facade, final SubstitutableProperties props, final NanoService factory, final DataSource dataSource) throws NullPointerException, IOException, ContentException, SyntaxException {
		if (facade == null) {
			throw new NullPointerException("Logger facade can't be null");
		}
		else if (props == null) {
			throw new NullPointerException("Service properties can't be null");
		}
		else if (factory == null) {
			throw new NullPointerException("Nano service factory can't be null");
		}
		else {
			this.facade = facade;
			this.dataSource = dataSource;
			this.factory = factory;
			try(final LoggerFacade check = facade.transaction("Microservice init")) {
				boolean wereErrors = false;

				if (!props.containsKey(NANOSERVICE_DEPLOYMENT_DIR)) {
					wereErrors = true;
					check.message(Severity.error, "Mandatory parameter [%1$s] is missing in the configuration",NANOSERVICE_DEPLOYMENT_DIR);
					deploymentRoot = null;
				}
				else {
					deploymentRoot = FileSystemFactory.createFileSystem(props.getProperty(NANOSERVICE_DEPLOYMENT_DIR,URI.class));
				}
				deplaymentClassPrefix = props.getProperty(NANOSERVICE_DEPLOYMENT_CLASSPREFIX,String.class,"");
				deploymentPeriod = props.getProperty(NANOSERVICE_DEPLOYMENT_PERIOD,long.class,"0");
				if (!wereErrors) {
					// Initial scan; deploys everything currently in the directory.
					redeploy();
					if (deploymentPeriod > 0) {
						final TimerTask tt = new TimerTask() {
							@Override
							public void run() {
								try{redeploy();
								} catch (IOException | ContentException e) {
									getLogger().message(Severity.error,e,"Error redeploying plugins");
								}
							}
						};

						t.schedule(tt,deploymentPeriod,deploymentPeriod);
					}
					// rollback() on the success path - presumably discards the
					// transactional log content; TODO confirm LoggerFacade semantics.
					check.rollback();
				}
				else {
					throw new IllegalArgumentException("Error initializing manager (see log for details)");
				}
			}
		}
	}

	/**
	 * Stops the periodic rescan timer. Deployed units are NOT undeployed here.
	 */
	@Override
	public void close() throws IOException {
		t.purge();
		t.cancel();
	}

	/**
	 * Returns the logger facade this manager reports to.
	 */
	public LoggerFacade getLogger() {
		return facade;
	}

	/**
	 * Rescans the deployment directory, computes the delta against the current
	 * registry and applies deploy/redeploy/undeploy actions to the factory.
	 *
	 * @return number of entity classes deployed minus the number undeployed
	 */
	protected int redeploy() throws IOException, ContentException, SyntaxException {
		final Map<String,DeploymentDesc> newContent = new HashMap<>();
		int delta = 0;

		parseDeploymentDirectory(deploymentRoot,newContent);
		final Map<String,DeploymentMode> changedContent = compareDeploymentDirectory(deployed,newContent);
		if (!changedContent.isEmpty()) {
			try(final LoggerFacade lf = facade.transaction("redeploy")) {
				// Fresh loader for the classes of new/changed units.
				final AdvancedURLClassLoader newLoader = new AdvancedURLClassLoader(this.getClass().getClassLoader());
				final Map<String,DeploymentDesc> forDeploy = new HashMap<>();

				// Pass 1: load classes of every new or changed unit.
				for (Entry<String, DeploymentMode> item : changedContent.entrySet()) {
					switch (item.getValue()) {
						case redeploy : case deploy :
							try(final FileSystemInterface fsi = deploymentRoot.clone().open(item.getKey())) {
								forDeploy.put(item.getKey(),buildDeploymentDesc(fsi,newLoader,deplaymentClassPrefix));
							}
							break;
						case undeploy :
							break;
						default :
							throw new UnsupportedOperationException("Deployment mode ["+item.getValue()+"] is not supported yet");
					}
				}
				// Pass 2: apply the actions against the nano service factory.
				for (Entry<String, DeploymentMode> item : changedContent.entrySet()) {
					switch (item.getValue()) {
						case undeploy :
							for (Entry<String,Object> entity : deployed.get(item.getKey()).classes.entrySet()) {
								factory.undeploy(entity.getKey());
								delta--;
							}
							break;
						case redeploy :
							// Intentional fall-through: redeploy = undeploy old classes, then deploy new ones.
							for (Entry<String,Object> entity : deployed.get(item.getKey()).classes.entrySet()) {
								factory.undeploy(entity.getKey());
								delta--;
							}
						case deploy :
							// NOTE(review): this deploys EVERY unit in forDeploy once per
							// deploy/redeploy item, so entities may be deployed multiple
							// times when several units changed in one scan - confirm intent.
							for (Entry<String,DeploymentDesc> unit : forDeploy.entrySet()) {
								for (Entry<String,Object> entity : unit.getValue().classes.entrySet()) {
									factory.deploy(entity.getKey(),entity.getValue());
									delta++;
								}
							}
							break;
						default :
							throw new UnsupportedOperationException("Deployment mode ["+item.getValue()+"] is not supported yet");
					}
				}
				// NOTE(review): the registry is replaced with only the units processed
				// this round; descriptors of unchanged, still-deployed units are dropped,
				// so the next scan will see them as new - confirm this is intended.
				deployed.clear();
				deployed.putAll(forDeploy);
				lf.rollback();
			}
		}
		return delta;
	}

	/**
	 * Recursively walks the deployment tree collecting *.jar and *.class units
	 * into the given map, keyed by path, with their last-modified timestamps.
	 */
	static void parseDeploymentDirectory(final FileSystemInterface node, final Map<String,DeploymentDesc> content) throws IOException {
		for (String item : node.list()) {
			try(final FileSystemInterface fsi = node.clone().open(item)) {
				if (item.endsWith(".jar") || item.endsWith(".class")) {
					content.put(fsi.getPath(),new DeploymentDesc(item.endsWith(".jar") ? DeploymentDesc.UNIT_JAR : DeploymentDesc.UNIT_CLASS,fsi.getPath(),fsi.lastModified()));
				}
				else if (fsi.isDirectory()) {
					parseDeploymentDirectory(fsi,content);
				}
			}
		}
	}

	/**
	 * Diffs two directory snapshots: new keys -> deploy, newer timestamps ->
	 * redeploy, keys missing from the new snapshot -> undeploy.
	 */
	static Map<String,DeploymentMode> compareDeploymentDirectory(final Map<String,DeploymentDesc> oldContent, final Map<String,DeploymentDesc> newContent) throws IOException {
		final Map<String,DeploymentMode> result = new HashMap<>();

		for (Entry<String, DeploymentDesc> item : newContent.entrySet()) {
			if (!oldContent.containsKey(item.getKey())) {
				result.put(item.getKey(),DeploymentMode.deploy);
			}
			else if (oldContent.get(item.getKey()).timestamp < item.getValue().timestamp) {
				result.put(item.getKey(),DeploymentMode.redeploy);
			}
		}
		for (Entry<String, DeploymentDesc> item : oldContent.entrySet()) {
			if (!newContent.containsKey(item.getKey())) {
				result.put(item.getKey(),DeploymentMode.undeploy);
			}
		}
		return result;
	}

	/**
	 * Loads all classes of a unit (either a jar or a single class file) into the
	 * given loader and instantiates those annotated with @RootPath, keyed by the
	 * annotation value.
	 *
	 * @throws IllegalArgumentException when the source is neither .jar nor .class
	 */
	static DeploymentDesc buildDeploymentDesc(final FileSystemInterface source, final AdvancedURLClassLoader loader, final String classPrefix) throws MalformedURLException, IOException, ContentException {
		final DeploymentDesc dd;

		if (source.getName().endsWith(".jar")) {
			dd = new DeploymentDesc(DeploymentDesc.UNIT_JAR,source.getPath(),source.lastModified());
			try(final InputStream is = source.read();
				final JarInputStream jis = new JarInputStream(is)) {
				JarEntry je;

				while ((je = jis.getNextJarEntry()) != null) {
					if (je.getName().endsWith(".class")) {
						// NOTE(review): Class.newInstance() is deprecated; also the
						// ContentException below is thrown without the original cause.
						Class<?> cl = loadFrom(toClassName(je.getName()),loader,jis);

						if (cl.isAnnotationPresent(RootPath.class)) {
							try{dd.classes.put(cl.getAnnotation(RootPath.class).value(),cl.newInstance());
							} catch (InstantiationException | IllegalAccessException e) {
								throw new ContentException();
							}
						}
					}
				}
			}
			loader.addURL(source.toURI().toURL());
		}
		else if (source.getName().endsWith(".class")) {
			dd = new DeploymentDesc(DeploymentDesc.UNIT_CLASS,source.getPath(),source.lastModified());
			try(final InputStream is = source.read()) {
				Class<?> cl = loadFrom(classPrefix+toClassName(source.getPath()),loader,is);

				if (cl.isAnnotationPresent(RootPath.class)) {
					try{dd.classes.put(cl.getAnnotation(RootPath.class).value(),cl.newInstance());
					} catch (InstantiationException | IllegalAccessException e) {
						throw new ContentException();
					}
				}
			}
			// Register the parent directory so the loader can resolve siblings.
			loader.addURL(source.clone().open("../").toURI().toURL());
		}
		else {
			throw new IllegalArgumentException("Source path ["+source.getPath()+"] is neither .jar nor .class");
		}
		return dd;
	}

	/**
	 * Reads the full byte-code stream and defines the class in the given loader.
	 */
	private static Class<?> loadFrom(final String className, final AdvancedURLClassLoader loader, final InputStream is) throws IOException {
		try(final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
			Utils.copyStream(is,baos);
			return loader.defineClassInternal(className,baos.toByteArray());
		}
	}

	/**
	 * Converts a file path like "/a/b/C.class" to a class name "a.b.C".
	 */
	private static String toClassName(final String name) {
		final String result = name.replace('/','.').replace(".class","");

		if (name.startsWith("/")) {
			return result.substring(1);
		}
		else {
			return result;
		}
	}

	/**
	 * URLClassLoader with addURL() widened for this class and a helper to define
	 * a class from raw bytes.
	 */
	static class AdvancedURLClassLoader extends URLClassLoader {
		AdvancedURLClassLoader(ClassLoader parent) {
			super(new URL[0], parent);
		}

		@Override
		protected void addURL(final URL url) {
			super.addURL(url);
		}

		private Class<?> defineClassInternal(final String className, final byte[] content) {
			return defineClass(className,content,0,content.length);
		}
	}

	/**
	 * Descriptor of one deployed unit: its kind, path, last-modified timestamp
	 * and the @RootPath-annotated instances created from it (path value -> instance).
	 */
	static class DeploymentDesc {
		private static final int UNIT_JAR = 0;
		private static final int UNIT_CLASS = 1;

		final int unitType;
		final String path;
		final long timestamp;
		final Map<String,Object> classes = new HashMap<>();

		private DeploymentDesc(int unitType, String path, long timestamp) {
			this.unitType = unitType;
			this.path = path;
			this.timestamp = timestamp;
		}

		@Override
		public String toString() {
			return "DeploymentDesc [unitType=" + unitType + ", path=" + path + ", timestamp=" + timestamp + ", classes=" + classes.entrySet() + "]";
		}
	}
}
|
|
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf.util;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Message;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.logging.Logger;
/**
* A tree representation of a FieldMask. Each leaf node in this tree represent
* a field path in the FieldMask.
*
* <p>For example, FieldMask "foo.bar,foo.baz,bar.baz" as a tree will be:
* <pre>
* [root] -+- foo -+- bar
* | |
* | +- baz
* |
* +- bar --- baz
* </pre>
*
* <p>By representing FieldMasks with this tree structure we can easily convert
* a FieldMask to a canonical form, merge two FieldMasks, calculate the
* intersection to two FieldMasks and traverse all fields specified by the
* FieldMask in a message tree.
*/
final class FieldMaskTree {
  private static final Logger logger = Logger.getLogger(FieldMaskTree.class.getName());

  private static final String FIELD_PATH_SEPARATOR_REGEX = "\\.";

  private static final class Node {
    final SortedMap<String, Node> children = new TreeMap<String, Node>();
  }

  private final Node root = new Node();

  /**
   * Creates an empty FieldMaskTree.
   */
  FieldMaskTree() {}

  /**
   * Creates a FieldMaskTree for a given FieldMask.
   */
  FieldMaskTree(FieldMask mask) {
    mergeFromFieldMask(mask);
  }

  @Override
  public String toString() {
    return FieldMaskUtil.toString(toFieldMask());
  }

  /**
   * Adds a field path to the tree. In a FieldMask, every field path matches the
   * specified field as well as all its sub-fields. For example, a field path
   * "foo.bar" matches field "foo.bar" and also "foo.bar.baz", etc. When adding
   * a field path to the tree, redundant sub-paths will be removed. That is,
   * after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
   * exists, which will turn the tree node for "foo.bar" to a leaf node.
   * Likewise, if the field path to add is a sub-path of an existing leaf node,
   * nothing will be changed in the tree.
   *
   * @return this tree, for chaining
   */
  FieldMaskTree addFieldPath(String path) {
    String[] parts = path.split(FIELD_PATH_SEPARATOR_REGEX);
    if (parts.length == 0) {
      return this;
    }
    Node node = root;
    boolean createNewBranch = false;
    // Find the matching node in the tree.
    for (String part : parts) {
      // Check whether the path matches an existing leaf node.
      if (!createNewBranch && node != root && node.children.isEmpty()) {
        // The path to add is a sub-path of an existing leaf node.
        return this;
      }
      if (node.children.containsKey(part)) {
        node = node.children.get(part);
      } else {
        createNewBranch = true;
        Node tmp = new Node();
        node.children.put(part, tmp);
        node = tmp;
      }
    }
    // Turn the matching node into a leaf node (i.e., remove sub-paths).
    node.children.clear();
    return this;
  }

  /**
   * Merges all field paths in a FieldMask into this tree.
   *
   * @return this tree, for chaining
   */
  FieldMaskTree mergeFromFieldMask(FieldMask mask) {
    for (String path : mask.getPathsList()) {
      addFieldPath(path);
    }
    return this;
  }

  /**
   * Converts this tree to a FieldMask.
   */
  FieldMask toFieldMask() {
    if (root.children.isEmpty()) {
      return FieldMask.getDefaultInstance();
    }
    List<String> paths = new ArrayList<String>();
    getFieldPaths(root, "", paths);
    return FieldMask.newBuilder().addAllPaths(paths).build();
  }

  /**
   * Gathers all field paths in a sub-tree.
   */
  private void getFieldPaths(Node node, String path, List<String> paths) {
    if (node.children.isEmpty()) {
      paths.add(path);
      return;
    }
    for (Entry<String, Node> entry : node.children.entrySet()) {
      String childPath = path.isEmpty() ? entry.getKey() : path + "." + entry.getKey();
      getFieldPaths(entry.getValue(), childPath, paths);
    }
  }

  /**
   * Adds the intersection of this tree with the given {@code path} to {@code output}.
   */
  void intersectFieldPath(String path, FieldMaskTree output) {
    if (root.children.isEmpty()) {
      return;
    }
    String[] parts = path.split(FIELD_PATH_SEPARATOR_REGEX);
    if (parts.length == 0) {
      return;
    }
    Node node = root;
    for (String part : parts) {
      if (node != root && node.children.isEmpty()) {
        // The given path is a sub-path of an existing leaf node in the tree.
        output.addFieldPath(path);
        return;
      }
      if (node.children.containsKey(part)) {
        node = node.children.get(part);
      } else {
        return;
      }
    }
    // We found a matching node for the path. All leaf children of this matching
    // node is in the intersection.
    List<String> paths = new ArrayList<String>();
    getFieldPaths(node, path, paths);
    for (String value : paths) {
      output.addFieldPath(value);
    }
  }

  /**
   * Merges all fields specified by this FieldMaskTree from {@code source} to {@code destination}.
   *
   * @throws IllegalArgumentException if the two messages are of different types
   */
  void merge(Message source, Message.Builder destination, FieldMaskUtil.MergeOptions options) {
    if (source.getDescriptorForType() != destination.getDescriptorForType()) {
      throw new IllegalArgumentException("Cannot merge messages of different types.");
    }
    if (root.children.isEmpty()) {
      return;
    }
    merge(root, "", source, destination, options);
  }

  /**
   * Merges all fields specified by a sub-tree from {@code source} to {@code destination}.
   */
  private void merge(
      Node node,
      String path,
      Message source,
      Message.Builder destination,
      FieldMaskUtil.MergeOptions options) {
    assert source.getDescriptorForType() == destination.getDescriptorForType();
    Descriptor descriptor = source.getDescriptorForType();
    for (Entry<String, Node> entry : node.children.entrySet()) {
      FieldDescriptor field = descriptor.findFieldByName(entry.getKey());
      if (field == null) {
        logger.warning(
            "Cannot find field \""
                + entry.getKey()
                + "\" in message type "
                + descriptor.getFullName());
        continue;
      }
      if (!entry.getValue().children.isEmpty()) {
        // An interior node can only descend into a singular message field.
        if (field.isRepeated() || field.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
          // FIX: corrected "singluar" typo in the warning message.
          logger.warning(
              "Field \""
                  + field.getFullName()
                  + "\" is not a "
                  + "singular message field and cannot have sub-fields.");
          continue;
        }
        String childPath = path.isEmpty() ? entry.getKey() : path + "." + entry.getKey();
        merge(
            entry.getValue(),
            childPath,
            (Message) source.getField(field),
            destination.getFieldBuilder(field),
            options);
        continue;
      }
      if (field.isRepeated()) {
        if (options.replaceRepeatedFields()) {
          destination.setField(field, source.getField(field));
        } else {
          // FIX: use a bounded wildcard instead of the raw List type.
          for (Object element : (List<?>) source.getField(field)) {
            destination.addRepeatedField(field, element);
          }
        }
      } else {
        if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
          if (options.replaceMessageFields()) {
            if (!source.hasField(field)) {
              destination.clearField(field);
            } else {
              destination.setField(field, source.getField(field));
            }
          } else {
            if (source.hasField(field)) {
              destination.getFieldBuilder(field).mergeFrom((Message) source.getField(field));
            }
          }
        } else {
          if (source.hasField(field) || !options.replacePrimitiveFields()) {
            destination.setField(field, source.getField(field));
          } else {
            destination.clearField(field);
          }
        }
      }
    }
  }
}
|
|
/*
* Copyright (c) 2015-2019 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.network.util;
import com.jme3.network.Message;
import com.jme3.network.MessageConnection;
import com.jme3.network.MessageListener;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A MessageListener implementation that will forward messages to methods
* of a delegate object. These methods can be automapped or manually
* specified. Subclasses provide specific implementations for how to
* find the actual delegate object.
*
* @author Paul Speed
*/
public abstract class AbstractMessageDelegator<S extends MessageConnection>
implements MessageListener<S> {
static final Logger log = Logger.getLogger(AbstractMessageDelegator.class.getName());
private Class delegateType;
private Map<Class, Method> methods = new HashMap<Class, Method>();
private Class[] messageTypes;
/**
* Creates an AbstractMessageDelegator that will forward received
* messages to methods of the specified delegate type. If automap
* is true then reflection is used to lookup probably message handling
* methods.
*/
protected AbstractMessageDelegator( Class delegateType, boolean automap ) {
this.delegateType = delegateType;
if( automap ) {
automap();
}
}
/**
* Returns the array of messages known to be handled by this message
* delegator.
*/
public Class[] getMessageTypes() {
if( messageTypes == null ) {
messageTypes = methods.keySet().toArray(new Class[methods.size()]);
}
return messageTypes;
}
/**
* Returns true if the specified method is valid for the specified
* message type. This is used internally during automapping to
* provide implementation specific filtering of methods.
* This implementation checks for methods that take either the connection and message
* type arguments (in that order) or just the message type.
*/
protected boolean isValidMethod( Method m, Class messageType ) {
if( log.isLoggable(Level.FINEST) ) {
log.log(Level.FINEST, "isValidMethod({0}, {1})", new Object[]{m, messageType});
}
// Parameters must be S and message type or just message type
Class<?>[] parms = m.getParameterTypes();
if( parms.length != 2 && parms.length != 1 ) {
log.finest("Parameter count is not 1 or 2");
return false;
}
int messageIndex = 0;
if( parms.length > 1 ) {
if( MessageConnection.class.isAssignableFrom(parms[0]) ) {
messageIndex++;
} else {
log.finest("First parameter is not a MessageConnection or subclass.");
return false;
}
}
if( messageType == null && !Message.class.isAssignableFrom(parms[messageIndex]) ) {
log.finest("Second parameter is not a Message or subclass.");
return false;
}
if( messageType != null && !parms[messageIndex].isAssignableFrom(messageType) ) {
log.log(Level.FINEST, "Second parameter is not a {0}", messageType);
return false;
}
return true;
}
/**
* Convenience method that returns the message type as
* reflectively determined for a particular method. This
* only works with methods that actually have arguments.
* This implementation returns the last element of the method's
* getParameterTypes() array, thus supporting both
* method(connection, messageType) as well as just method(messageType)
* calling forms.
*/
protected Class getMessageType( Method m ) {
Class<?>[] parms = m.getParameterTypes();
return parms[parms.length-1];
}
/**
* Goes through all of the delegate type's methods to find
* a method of the specified name that may take the specified
* message type.
*/
protected Method findDelegate( String name, Class messageType ) {
// We do an exhaustive search because it's easier to
// check for a variety of parameter types and it's all
// that Class would be doing in getMethod() anyway.
for( Method m : delegateType.getDeclaredMethods() ) {
if( !m.getName().equals(name) ) {
continue;
}
if( isValidMethod(m, messageType) ) {
return m;
}
}
return null;
}
/**
 * Returns true if the specified method name is allowed.
 * This is used by automapping to determine if a method
 * should be rejected purely on name.  The default implementation
 * accepts every name; subclasses can override this to filter
 * automapped methods.
 */
protected boolean allowName( String name ) {
return true;
}
/**
 * Maps every matching method by invoking map(Set) with a null
 * constraint set, then fails fast if the delegate type exposed no
 * message handling methods at all.
 */
protected final void automap() {
    Set<String> noConstraints = null;
    map(noConstraints);
    if( methods.isEmpty() ) {
        throw new RuntimeException("No message handling methods found for class:" + delegateType);
    }
}
/**
 * Maps only the given method names, autowiring the parameters
 * for each one.
 */
public AbstractMessageDelegator<S> map( String... methodNames ) {
    Set<String> allowed = new HashSet<String>();
    for( String methodName : methodNames ) {
        allowed.add(methodName);
    }
    map(allowed);
    return this;
}
/**
 * Scans all declared methods of the delegate type and maps those
 * that satisfy the current constraints.  With a null constraint set,
 * names are filtered through allowName(); otherwise only names
 * present in the set are considered.  Candidates that survive the
 * name filter are then screened with isValidMethod() using a null
 * message type.  Matched methods are forced accessible so that
 * non-public methods (and methods of non-public inner classes)
 * can be invoked as well.
 */
protected void map( Set<String> constraints ) {
    if( log.isLoggable(Level.FINEST) ) {
        log.log(Level.FINEST, "map({0})", constraints);
    }
    for( Method candidate : delegateType.getDeclaredMethods() ) {
        if( log.isLoggable(Level.FINEST) ) {
            log.log(Level.FINEST, "Checking method:{0}", candidate);
        }
        boolean nameOk;
        if( constraints == null ) {
            nameOk = allowName(candidate.getName());
            if( !nameOk ) {
                log.finest("Name is not allowed.");
            }
        } else {
            nameOk = constraints.contains(candidate.getName());
            if( !nameOk ) {
                log.finest("Name is not in constraints set.");
            }
        }
        if( !nameOk || !isValidMethod(candidate, null) ) {
            continue;
        }
        if( log.isLoggable(Level.FINEST) ) {
            log.log(Level.FINEST, "Adding method mapping:{0} = {1}", new Object[]{getMessageType(candidate), candidate});
        }
        // Make sure we can call the method even if it's not public or
        // lives in a non-public inner class.
        candidate.setAccessible(true);
        methods.put(getMessageType(candidate), candidate);
    }
    messageTypes = null;
}
/**
 * Explicitly binds the named method to the supplied message type,
 * failing if no compatible method can be located on the delegate type.
 */
public AbstractMessageDelegator<S> map( Class messageType, String methodName ) {
    // Lookup the method
    Method delegate = findDelegate( methodName, messageType );
    if( delegate == null ) {
        String msg = "Method:" + methodName
                + " not found matching signature (MessageConnection, "
                + messageType.getName() + ")";
        throw new RuntimeException( msg );
    }
    if( log.isLoggable(Level.FINEST) ) {
        log.log(Level.FINEST, "Adding method mapping:{0} = {1}", new Object[]{messageType, delegate});
    }
    methods.put( messageType, delegate );
    messageTypes = null;
    return this;
}
/**
 * Looks up the method previously mapped to the specified message
 * type, or null if no mapping exists.
 */
protected Method getMethod( Class c ) {
    return methods.get(c);
}
/**
 * Implemented by subclasses to provide the actual delegate object
 * against which the mapped message type methods will be called.
 * Returning null causes the message to be ignored for that source
 * (see messageReceived()).
 */
protected abstract Object getSourceDelegate( S source );
/**
 * MessageListener implementation that resolves the handler method
 * for the incoming message's class and invokes it on the delegate
 * supplied by getSourceDelegate().  Null messages and null delegates
 * (meaning "ignore this source") are silently skipped; a missing
 * mapping for the message class is an error.
 */
@Override
public void messageReceived( S source, Message msg ) {
    if( msg == null ) {
        return;
    }
    Object target = getSourceDelegate(source);
    if( target == null ) {
        // Means ignore this message/source
        return;
    }
    Method handler = getMethod(msg.getClass());
    if( handler == null ) {
        throw new RuntimeException("Delegate method not found for message class:"
                + msg.getClass());
    }
    // Two-argument handlers also receive the source connection.
    boolean takesConnection = handler.getParameterTypes().length > 1;
    Object[] args = takesConnection ? new Object[] { source, msg }
                                    : new Object[] { msg };
    try {
        handler.invoke( target, args );
    } catch( IllegalAccessException e ) {
        throw new RuntimeException("Error executing:" + handler, e);
    } catch( InvocationTargetException e ) {
        // Unwrap so the delegate's own exception is the visible cause.
        throw new RuntimeException("Error executing:" + handler, e.getCause());
    }
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer;
import static org.apache.hadoop.fs.CreateFlag.CREATE;
import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
import org.apache.hadoop.yarn.server.nodemanager.recovery.RecoveryIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FSError;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskValidator;
import org.apache.hadoop.util.DiskValidatorFactory;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.hadoop.util.concurrent.HadoopScheduledThreadPoolExecutor;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
import org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.DirsChangeListener;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol;
import org.apache.hadoop.yarn.server.nodemanager.api.ResourceLocalizationSpec;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalResourceStatus;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerAction;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationInitedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceFailedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.deletion.task.FileDeletionTask;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalCacheCleaner.LocalCacheCleanerStats;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ApplicationLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationCleanupEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRecoveredEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenSecretManager;
import org.apache.hadoop.yarn.server.nodemanager.executor.LocalizerStartContext;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.LocalResourceTrackerState;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredLocalizationState;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredUserResources;
import org.apache.hadoop.yarn.server.nodemanager.security.authorize.NMPolicyProvider;
import org.apache.hadoop.yarn.server.nodemanager.util.NodeManagerBuilderUtils;
import org.apache.hadoop.yarn.util.FSDownload;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
public class ResourceLocalizationService extends CompositeService
implements EventHandler<LocalizationEvent>, LocalizationProtocol {
private static final Logger LOG =
LoggerFactory.getLogger(ResourceLocalizationService.class);
// Subdirectory of each local dir reserved for NM-only files.
public static final String NM_PRIVATE_DIR = "nmPrivate";
// Owner-only permissions for the nmPrivate tree.
public static final FsPermission NM_PRIVATE_PERM = new FsPermission((short) 0700);
private static final FsPermission PUBLIC_FILECACHE_FOLDER_PERMS =
new FsPermission((short) 0755);
// RPC server implementing LocalizationProtocol (localizer heartbeats).
private Server server;
private InetSocketAddress localizationServerAddress;
// Local cache target size in bytes; fed to LocalCacheCleaner in
// handleCacheCleanup().
@VisibleForTesting
long cacheTargetSize;
// Delay between cache cleanup passes, in milliseconds.
private long cacheCleanupPeriod;
private final ContainerExecutor exec;
protected final Dispatcher dispatcher;
private final DeletionService delService;
private LocalizerTracker localizerTracker;
private RecordFactory recordFactory;
// Single-threaded scheduler driving the periodic cache cleanup task.
private final ScheduledExecutorService cacheCleanup;
// Only created when security is enabled; see createServer().
private LocalizerTokenSecretManager secretManager;
private NMStateStoreService stateStore;
@VisibleForTesting
final NodeManagerMetrics metrics;
// Tracker for PUBLIC-visibility resources shared across users.
@VisibleForTesting
LocalResourcesTracker publicRsrc;
private LocalDirsHandlerService dirsHandler;
private DirsChangeListener localDirsChangeListener;
private DirsChangeListener logDirsChangeListener;
private Context nmContext;
private DiskValidator diskValidator;
/**
* Map of LocalResourceTrackers keyed by username, for private
* resources.
*/
@VisibleForTesting
final ConcurrentMap<String, LocalResourcesTracker> privateRsrc =
new ConcurrentHashMap<String,LocalResourcesTracker>();
/**
* Map of LocalResourceTrackers keyed by appid, for application
* resources.
*/
private final ConcurrentMap<String,LocalResourcesTracker> appRsrc =
new ConcurrentHashMap<String,LocalResourcesTracker>();
// Local filesystem context, initialized in serviceInit().
FileContext lfs;
/**
* Creates the localization service.  Collaborating services are
* injected by the node manager; only the cache cleanup scheduler is
* constructed here.
*/
public ResourceLocalizationService(Dispatcher dispatcher,
ContainerExecutor exec, DeletionService delService,
LocalDirsHandlerService dirsHandler, Context context,
NodeManagerMetrics metrics) {
super(ResourceLocalizationService.class.getName());
this.exec = exec;
this.dispatcher = dispatcher;
this.delService = delService;
this.dirsHandler = dirsHandler;
// A single named thread runs the periodic cache cleanup task
// scheduled in serviceStart().
this.cacheCleanup = new HadoopScheduledThreadPoolExecutor(1,
new ThreadFactoryBuilder()
.setNameFormat("ResourceLocalizationService Cache Cleanup")
.build());
this.stateStore = context.getNMStateStore();
this.nmContext = context;
this.metrics = metrics;
}
/**
 * Returns a {@link FileContext} for the local file system.
 *
 * @param conf configuration used to construct the file context
 * @throws YarnRuntimeException if the local file system cannot be accessed
 */
FileContext getLocalFileContext(Configuration conf) {
    try {
        return FileContext.getLocalFSFileContext(conf);
    } catch (IOException e) {
        // Preserve the underlying IOException as the cause instead of
        // discarding it, so the real failure reason is diagnosable.
        throw new YarnRuntimeException("Failed to access local fs", e);
    }
}
/**
 * Validates localization-related configuration.  Currently this only
 * checks the per-directory file limit of the local cache and rejects
 * values of 36 or below.
 */
private void validateConf(Configuration conf) {
    int perDirFileLimit =
        conf.getInt(YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY,
            YarnConfiguration.DEFAULT_NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY);
    if (perDirFileLimit <= 36) {
        LOG.error(YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY
            + " parameter is configured with very low value.");
        throw new YarnRuntimeException(
            YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY
            + " parameter is configured with a value less than 37.");
    }
    // The throw above exits, so no else branch is needed here.
    LOG.info("per directory file limit = " + perDirFileLimit);
}
/**
* Initializes the service: validates configuration, creates the public
* resource tracker, prepares the local filesystem (wiping and
* re-initializing local/log dirs when no recovery state is usable),
* reads the cache tuning settings, and wires up the localizer tracker
* plus the directory change listeners.
*/
@Override
public void serviceInit(Configuration conf) throws Exception {
this.validateConf(conf);
this.publicRsrc = new LocalResourcesTrackerImpl(null, null, dispatcher,
true, conf, stateStore, dirsHandler);
this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
try {
lfs = getLocalFileContext(conf);
lfs.setUMask(new FsPermission((short) FsPermission.DEFAULT_UMASK));
// Without usable recovery state, leftover contents of the local and
// log dirs cannot be trusted: wipe and recreate them.
if (!stateStore.canRecover()|| stateStore.isNewlyCreated()) {
cleanUpLocalDirs(lfs, delService);
cleanupLogDirs(lfs, delService);
initializeLocalDirs(lfs);
initializeLogDirs(lfs);
}
} catch (Exception e) {
throw new YarnRuntimeException(
"Failed to initialize LocalizationService", e);
}
diskValidator = DiskValidatorFactory.getInstance(
YarnConfiguration.DEFAULT_DISK_VALIDATOR);
// The target size is configured in MB; the shift converts to bytes.
cacheTargetSize =
conf.getLong(YarnConfiguration.NM_LOCALIZER_CACHE_TARGET_SIZE_MB, YarnConfiguration.DEFAULT_NM_LOCALIZER_CACHE_TARGET_SIZE_MB) << 20;
cacheCleanupPeriod =
conf.getLong(YarnConfiguration.NM_LOCALIZER_CACHE_CLEANUP_INTERVAL_MS, YarnConfiguration.DEFAULT_NM_LOCALIZER_CACHE_CLEANUP_INTERVAL_MS);
localizationServerAddress = conf.getSocketAddr(
YarnConfiguration.NM_BIND_HOST,
YarnConfiguration.NM_LOCALIZER_ADDRESS,
YarnConfiguration.DEFAULT_NM_LOCALIZER_ADDRESS,
YarnConfiguration.DEFAULT_NM_LOCALIZER_PORT);
localizerTracker = createLocalizerTracker(conf);
addService(localizerTracker);
dispatcher.register(LocalizerEventType.class, localizerTracker);
// Re-initialize directories whenever the dirs handler reports a
// change in the set of local/log dirs.
localDirsChangeListener = new DirsChangeListener() {
@Override
public void onDirsChanged() {
checkAndInitializeLocalDirs();
}
};
logDirsChangeListener = new DirsChangeListener() {
@Override
public void onDirsChanged() {
initializeLogDirs(lfs);
}
};
super.serviceInit(conf);
}
/**
* Recovers localized resources after an NM restart: first replays the
* public tracker state, then for each user replays the private tracker
* state and every per-application tracker state, creating trackers on
* demand.
*/
public void recoverLocalizedResources(RecoveredLocalizationState state)
throws URISyntaxException, IOException {
LocalResourceTrackerState trackerState = state.getPublicTrackerState();
recoverTrackerResources(publicRsrc, trackerState);
try (RecoveryIterator<Map.Entry<String, RecoveredUserResources>> it
= state.getIterator()) {
while (it.hasNext()) {
Map.Entry<String, RecoveredUserResources> userEntry = it.next();
String user = userEntry.getKey();
RecoveredUserResources userResources = userEntry.getValue();
trackerState = userResources.getPrivateTrackerState();
if (!trackerState.isEmpty()) {
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
null, dispatcher, true, super.getConfig(), stateStore,
dirsHandler);
// A tracker may already be registered for this user; if so,
// recover into the existing one instead of the new instance.
LocalResourcesTracker oldTracker = privateRsrc.putIfAbsent(user,
tracker);
if (oldTracker != null) {
tracker = oldTracker;
}
recoverTrackerResources(tracker, trackerState);
}
for (Map.Entry<ApplicationId, LocalResourceTrackerState> appEntry :
userResources.getAppTrackerStates().entrySet()) {
trackerState = appEntry.getValue();
if (!trackerState.isEmpty()) {
ApplicationId appId = appEntry.getKey();
String appIdStr = appId.toString();
LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
appId, dispatcher, false, super.getConfig(), stateStore,
dirsHandler);
// Same putIfAbsent dance as above, keyed by application id.
LocalResourcesTracker oldTracker = appRsrc.putIfAbsent(appIdStr,
tracker);
if (oldTracker != null) {
tracker = oldTracker;
}
recoverTrackerResources(tracker, trackerState);
}
}
}
}
}
/**
* Replays one tracker's persisted state.  Completed resources are
* re-registered at their recovered path and size; in-progress
* downloads are registered and then immediately removed, since
* containers will simply request them again.
*/
private void recoverTrackerResources(LocalResourcesTracker tracker,
LocalResourceTrackerState state) throws URISyntaxException {
for (LocalizedResourceProto proto : state.getLocalizedResources()) {
LocalResource rsrc = new LocalResourcePBImpl(proto.getResource());
LocalResourceRequest req = new LocalResourceRequest(rsrc);
if (LOG.isDebugEnabled()) {
LOG.debug("Recovering localized resource " + req + " at "
+ proto.getLocalPath());
}
tracker.handle(new ResourceRecoveredEvent(req,
new Path(proto.getLocalPath()), proto.getSize()));
}
for (Map.Entry<LocalResourceProto, Path> entry :
state.getInProgressResources().entrySet()) {
LocalResource rsrc = new LocalResourcePBImpl(entry.getKey());
LocalResourceRequest req = new LocalResourceRequest(rsrc);
Path localPath = entry.getValue();
// Size 0: the download never completed for this resource.
tracker.handle(new ResourceRecoveredEvent(req, localPath, 0));
// delete any in-progress localizations, containers will request again
LOG.info("Deleting in-progress localization for " + req + " at "
+ localPath);
tracker.remove(tracker.getLocalizedResource(req), delService);
}
// TODO: remove untracked directories in local filesystem
}
/**
 * LocalizationProtocol entry point: forwards the localizer heartbeat
 * to the tracker and returns the resulting instructions.
 */
@Override
public LocalizerHeartbeatResponse heartbeat(LocalizerStatus status) {
    LocalizerHeartbeatResponse response =
        localizerTracker.processHeartbeat(status);
    return response;
}
/**
* Starts the periodic cache cleanup task and the localization RPC
* server, then registers for local/log dir change notifications.
*/
@Override
public void serviceStart() throws Exception {
cacheCleanup.scheduleWithFixedDelay(new CacheCleanup(dispatcher),
cacheCleanupPeriod, cacheCleanupPeriod, TimeUnit.MILLISECONDS);
server = createServer();
server.start();
// Record the actual listener address back into the configuration --
// presumably to account for ephemeral ports; confirm against
// Configuration.updateConnectAddr semantics.
localizationServerAddress =
getConfig().updateConnectAddr(YarnConfiguration.NM_BIND_HOST,
YarnConfiguration.NM_LOCALIZER_ADDRESS,
YarnConfiguration.DEFAULT_NM_LOCALIZER_ADDRESS,
server.getListenerAddress());
LOG.info("Localizer started on port " + server.getPort());
super.serviceStart();
dirsHandler.registerLocalDirsChangeListener(localDirsChangeListener);
dirsHandler.registerLogDirsChangeListener(logDirsChangeListener);
}
// Factory hook for the localizer tracker; package-private, presumably
// so tests can override it to inject a custom tracker -- confirm.
LocalizerTracker createLocalizerTracker(Configuration conf) {
return new LocalizerTracker(conf);
}
/**
* Builds the RPC server implementing LocalizationProtocol.  A
* localizer token secret manager is only created in secure mode
* (otherwise null is passed), and service ACLs are refreshed when
* Hadoop security authorization is enabled.
*/
Server createServer() {
Configuration conf = getConfig();
YarnRPC rpc = YarnRPC.create(conf);
if (UserGroupInformation.isSecurityEnabled()) {
secretManager = new LocalizerTokenSecretManager();
}
Server server = rpc.getServer(LocalizationProtocol.class, this,
localizationServerAddress, conf, secretManager,
conf.getInt(YarnConfiguration.NM_LOCALIZER_CLIENT_THREAD_COUNT,
YarnConfiguration.DEFAULT_NM_LOCALIZER_CLIENT_THREAD_COUNT));
// Enable service authorization?
if (conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
server.refreshServiceAcl(conf, NMPolicyProvider.getInstance());
}
return server;
}
/**
* Stops the service: deregisters dir change listeners, stops the RPC
* server if it was started, and shuts down the cache cleanup
* scheduler.
*/
@Override
public void serviceStop() throws Exception {
dirsHandler.deregisterLocalDirsChangeListener(localDirsChangeListener);
dirsHandler.deregisterLogDirsChangeListener(logDirsChangeListener);
if (server != null) {
server.stop();
}
// NOTE(review): shutdown() does not wait for an in-flight cleanup
// pass to complete -- confirm this is acceptable on NM shutdown.
cacheCleanup.shutdown();
super.serviceStop();
}
/**
* Central dispatch for localization events; each event type is routed
* to a dedicated handler method, and unknown types are an error.
*/
@Override
public void handle(LocalizationEvent event) {
// TODO: create log dir as $logdir/$user/$appId
switch (event.getType()) {
case INIT_APPLICATION_RESOURCES:
handleInitApplicationResources(
((ApplicationLocalizationEvent)event).getApplication());
break;
case LOCALIZE_CONTAINER_RESOURCES:
handleInitContainerResources((ContainerLocalizationRequestEvent) event);
break;
case CONTAINER_RESOURCES_LOCALIZED:
handleContainerResourcesLocalized((ContainerLocalizationEvent) event);
break;
case CACHE_CLEANUP:
handleCacheCleanup();
break;
case CLEANUP_CONTAINER_RESOURCES:
handleCleanupContainerResources((ContainerLocalizationCleanupEvent)event);
break;
case DESTROY_APPLICATION_RESOURCES:
handleDestroyApplicationResources(
((ApplicationLocalizationEvent)event).getApplication());
break;
default:
throw new YarnRuntimeException("Unknown localization event: " + event);
}
}
/**
 * Handles the event received the first time any container is
 * scheduled by a given application: registers the per-user and
 * per-application resource trackers (if absent), then signals the
 * application that initialization is done so containers can launch.
 */
@SuppressWarnings("unchecked")
private void handleInitApplicationResources(Application app) {
    // 0) Create application tracking structs
    String user = app.getUser();
    String appKey = app.getAppId().toString();
    privateRsrc.putIfAbsent(user, new LocalResourcesTrackerImpl(user,
        null, dispatcher, true, super.getConfig(), stateStore, dirsHandler));
    appRsrc.putIfAbsent(appKey, new LocalResourcesTrackerImpl(app.getUser(),
        app.getAppId(), dispatcher, false, super.getConfig(), stateStore,
        dirsHandler));
    // 1) Signal container init.  The ApplicationImpl state machine
    // handles this event and allows containers to proceed with launching.
    dispatcher.getEventHandler().handle(new ApplicationInitedEvent(
        app.getAppId()));
}
/**
 * For each of the requested resources for a container, determines the
 * appropriate {@link LocalResourcesTracker} and forwards a
 * {@link LocalResourceRequest} to that tracker.  Containers that are
 * not in a localizable state are skipped with a warning.
 */
private void handleInitContainerResources(
    ContainerLocalizationRequestEvent rsrcReqs) {
    Container container = rsrcReqs.getContainer();
    EnumSet<ContainerState> localizableStates =
        EnumSet.of(ContainerState.LOCALIZING,
            ContainerState.RUNNING, ContainerState.REINITIALIZING);
    if (!localizableStates.contains(container.getContainerState())) {
        LOG.warn(container.getContainerId() + " is at "
            + container.getContainerState()
            + " state, do not localize resources.");
        return;
    }
    // create a loading cache for the file statuses
    LoadingCache<Path, Future<FileStatus>> statCache = CacheBuilder
        .newBuilder().build(FSDownload.createStatusCacheLoader(getConfig()));
    LocalizerContext ctxt = new LocalizerContext(
        container.getUser(), container.getContainerId(),
        container.getCredentials(), statCache);
    Map<LocalResourceVisibility, Collection<LocalResourceRequest>> rsrcs =
        rsrcReqs.getRequestedResources();
    for (Map.Entry<LocalResourceVisibility, Collection<LocalResourceRequest>>
        entry : rsrcs.entrySet()) {
        LocalResourceVisibility vis = entry.getKey();
        LocalResourcesTracker tracker =
            getLocalResourcesTracker(vis, container.getUser(),
                container.getContainerId().getApplicationAttemptId()
                    .getApplicationId());
        for (LocalResourceRequest req : entry.getValue()) {
            tracker.handle(new ResourceRequestEvent(req, vis, ctxt));
            if (LOG.isDebugEnabled()) {
                LOG.debug("Localizing " + req.getPath() +
                    " for container " + container.getContainerId());
            }
        }
    }
}
/**
 * Once a container's resources are localized, kill the corresponding
 * {@link ContainerLocalizer}.
 */
private void handleContainerResourcesLocalized(
    ContainerLocalizationEvent event) {
    String locId = event.getContainer().getContainerId().toString();
    localizerTracker.endContainerLocalization(locId);
}
/**
* Runs one local cache cleanup pass over the public cache and every
* private cache, logs the outcome, and publishes the statistics to
* the node manager metrics.
*
* @return the statistics collected during this pass
*/
@VisibleForTesting
LocalCacheCleanerStats handleCacheCleanup() {
LocalCacheCleaner cleaner =
new LocalCacheCleaner(delService, cacheTargetSize);
cleaner.addResources(publicRsrc);
for (LocalResourcesTracker t : privateRsrc.values()) {
cleaner.addResources(t);
}
LocalCacheCleaner.LocalCacheCleanerStats stats = cleaner.cleanCache();
// Detailed breakdown only at debug level; one-line summary otherwise.
if (LOG.isDebugEnabled()) {
LOG.debug(stats.toStringDetailed());
} else if (LOG.isInfoEnabled()) {
LOG.info(stats.toString());
}
// Update metrics
metrics.setCacheSizeBeforeClean(stats.getCacheSizeBeforeClean());
metrics.setTotalBytesDeleted(stats.getTotalDelSize());
metrics.setPrivateBytesDeleted(stats.getPrivateDelSize());
metrics.setPublicBytesDeleted(stats.getPublicDelSize());
return stats;
}
/**
* Releases a container's resource references with the appropriate
* trackers, stops the container's private localizers, schedules the
* container's work and nmPrivate directories for deletion on every
* local dir, and finally notifies the container of the cleanup.
*/
@SuppressWarnings("unchecked")
private void handleCleanupContainerResources(
ContainerLocalizationCleanupEvent rsrcCleanup) {
Container c = rsrcCleanup.getContainer();
Map<LocalResourceVisibility, Collection<LocalResourceRequest>> rsrcs =
rsrcCleanup.getResources();
for (Map.Entry<LocalResourceVisibility, Collection<LocalResourceRequest>> e :
rsrcs.entrySet()) {
LocalResourcesTracker tracker = getLocalResourcesTracker(e.getKey(), c.getUser(),
c.getContainerId().getApplicationAttemptId()
.getApplicationId());
for (LocalResourceRequest req : e.getValue()) {
tracker.handle(new ResourceReleaseEvent(req,
c.getContainerId()));
}
}
String locId = c.getContainerId().toString();
localizerTracker.cleanupPrivLocalizers(locId);
// Delete the container directories
String userName = c.getUser();
// NOTE(review): relies on Container.toString() producing the container
// id string -- confirm against the Container implementation.
String containerIDStr = c.toString();
String appIDStr =
c.getContainerId().getApplicationAttemptId().getApplicationId()
.toString();
// Try deleting from good local dirs and full local dirs because a dir might
// have gone bad while the app was running(disk full). In addition
// a dir might have become good while the app was running.
// Check if the container dir exists and if it does, try to delete it
for (String localDir : dirsHandler.getLocalDirsForCleanup()) {
// Delete the user-owned container-dir
Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE);
Path userdir = new Path(usersdir, userName);
Path allAppsdir = new Path(userdir, ContainerLocalizer.APPCACHE);
Path appDir = new Path(allAppsdir, appIDStr);
Path containerDir = new Path(appDir, containerIDStr);
submitDirForDeletion(userName, containerDir);
// Delete the nmPrivate container-dir
Path sysDir = new Path(localDir, NM_PRIVATE_DIR);
Path appSysDir = new Path(sysDir, appIDStr);
Path containerSysDir = new Path(appSysDir, containerIDStr);
submitDirForDeletion(null, containerSysDir);
}
// Tell the container its resources have been cleaned up.
dispatcher.getEventHandler().handle(
new ContainerEvent(c.getContainerId(),
ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
}
/**
* Schedules dir for deletion (as userName, or the NM user when null),
* but only if the path currently exists on the local filesystem.
*/
private void submitDirForDeletion(String userName, Path dir) {
try {
// Existence probe: getFileStatus throws IOException if dir is absent.
lfs.getFileStatus(dir);
FileDeletionTask deletionTask = new FileDeletionTask(delService, userName,
dir, null);
delService.delete(deletionTask);
} catch (UnsupportedFileSystemException ue) {
LOG.warn("Local dir " + dir + " is an unsupported filesystem", ue);
} catch (IOException ie) {
// ignore
// Deliberate best-effort: a missing dir simply needs no deletion.
return;
}
}
/**
 * Tears down localized state for a finished application: removes the
 * application's resource tracker (purging its entries from the state
 * store), schedules the application directories on every local dir for
 * deletion, and finally notifies the application that its resources
 * have been cleaned up.
 */
@SuppressWarnings({"unchecked"})
private void handleDestroyApplicationResources(Application application) {
    String userName = application.getUser();
    ApplicationId appId = application.getAppId();
    String appIDStr = application.toString();
    LocalResourcesTracker appLocalRsrcsTracker =
        appRsrc.remove(appId.toString());
    if (appLocalRsrcsTracker != null) {
        // Purge every localized resource of this app from the recovery
        // state store; the on-disk files are deleted below.
        for (LocalizedResource rsrc : appLocalRsrcsTracker) {
            Path localPath = rsrc.getLocalPath();
            if (localPath != null) {
                try {
                    stateStore.removeLocalizedResource(userName, appId, localPath);
                } catch (IOException e) {
                    LOG.error("Unable to remove resource " + rsrc + " for " + appIDStr
                        + " from state store", e);
                }
            }
        }
    } else {
        LOG.warn("Removing uninitialized application " + application);
    }
    // Delete the application directories.  userName and appIDStr are
    // already set above; the original redundant reassignments were removed.
    // Iterate over all dirs eligible for cleanup: a dir may have gone bad
    // (or become good again) while the app was running.
    for (String localDir : dirsHandler.getLocalDirsForCleanup()) {
        // Delete the user-owned app-dir
        Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE);
        Path userdir = new Path(usersdir, userName);
        Path allAppsdir = new Path(userdir, ContainerLocalizer.APPCACHE);
        Path appDir = new Path(allAppsdir, appIDStr);
        submitDirForDeletion(userName, appDir);
        // Delete the nmPrivate app-dir
        Path sysDir = new Path(localDir, NM_PRIVATE_DIR);
        Path appSysDir = new Path(sysDir, appIDStr);
        submitDirForDeletion(null, appSysDir);
    }
    // TODO: decrement reference counts of all resources associated with this
    // app
    dispatcher.getEventHandler().handle(new ApplicationEvent(
        application.getAppId(),
        ApplicationEventType.APPLICATION_RESOURCES_CLEANEDUP));
}
/**
 * Resolves the tracker responsible for resources of the given
 * visibility: PRIVATE resources are tracked per user, APPLICATION
 * resources per application id, and everything else (PUBLIC or any
 * future visibility) falls back to the shared public tracker.
 */
LocalResourcesTracker getLocalResourcesTracker(
    LocalResourceVisibility visibility, String user, ApplicationId appId) {
    if (visibility == LocalResourceVisibility.PRIVATE) {
        return privateRsrc.get(user);
    }
    if (visibility == LocalResourceVisibility.APPLICATION) {
        return appRsrc.get(appId.toString());
    }
    return publicRsrc;
}
/**
 * Builds the relative path of a user's file cache directory:
 * ./usercache/&lt;user&gt;/filecache
 */
private String getUserFileCachePath(String user) {
    return String.join(Path.SEPARATOR, ".",
        ContainerLocalizer.USERCACHE, user, ContainerLocalizer.FILECACHE);
}
/**
 * Builds the relative path of an application's file cache directory:
 * ./usercache/&lt;user&gt;/appcache/&lt;appId&gt;/filecache
 */
private String getAppFileCachePath(String user, String appId) {
    return String.join(Path.SEPARATOR, ".",
        ContainerLocalizer.USERCACHE, user, ContainerLocalizer.APPCACHE,
        appId, ContainerLocalizer.FILECACHE);
}
// Test-only accessor for the shared public localizer.
@VisibleForTesting
@Private
public PublicLocalizer getPublicLocalizer() {
return localizerTracker.publicLocalizer;
}
// Test-only accessor for the private localizer of a given localizer id.
@VisibleForTesting
@Private
public LocalizerRunner getLocalizerRunner(String locId) {
return localizerTracker.privLocalizers.get(locId);
}
// Test-only accessor for the full map of private localizers.
@VisibleForTesting
@Private
public Map<String, LocalizerRunner> getPrivateLocalizers() {
return localizerTracker.privLocalizers;
}
/**
* Sub-component handling the spawning of {@link ContainerLocalizer}s.
* A single shared localizer serves PUBLIC resources while a
* LocalizerRunner is created per localizer id for PRIVATE and
* APPLICATION resources (see handle()).
*/
class LocalizerTracker extends AbstractService implements EventHandler<LocalizerEvent> {
// Shared downloader for all PUBLIC resource requests.
private final PublicLocalizer publicLocalizer;
// Per-localizer-id runners for PRIVATE/APPLICATION resources.
private final Map<String,LocalizerRunner> privLocalizers;
LocalizerTracker(Configuration conf) {
this(conf, new HashMap<String,LocalizerRunner>());
}
// The map is injectable, presumably for tests -- confirm.
LocalizerTracker(Configuration conf,
Map<String,LocalizerRunner> privLocalizers) {
super(LocalizerTracker.class.getName());
this.publicLocalizer = new PublicLocalizer(conf);
this.privLocalizers = privLocalizers;
}
@Override
public synchronized void serviceStart() throws Exception {
// Bring up the shared public localizer before any events arrive.
publicLocalizer.start();
super.serviceStart();
}
public LocalizerHeartbeatResponse processHeartbeat(LocalizerStatus status) {
String locId = status.getLocalizerId();
synchronized (privLocalizers) {
LocalizerRunner localizer = privLocalizers.get(locId);
if (null == localizer) {
// TODO process resources anyway
LOG.info("Unknown localizer with localizerId " + locId
+ " is sending heartbeat. Ordering it to DIE");
LocalizerHeartbeatResponse response =
recordFactory.newRecordInstance(LocalizerHeartbeatResponse.class);
response.setLocalizerAction(LocalizerAction.DIE);
return response;
}
return localizer.processHeartbeat(status.getResources());
}
}
@Override
public void serviceStop() throws Exception {
for (LocalizerRunner localizer : privLocalizers.values()) {
localizer.interrupt();
}
publicLocalizer.interrupt();
super.serviceStop();
}
@Override
public void handle(LocalizerEvent event) {
String locId = event.getLocalizerId();
switch (event.getType()) {
case REQUEST_RESOURCE_LOCALIZATION:
// 0) find running localizer or start new thread
LocalizerResourceRequestEvent req =
(LocalizerResourceRequestEvent)event;
switch (req.getVisibility()) {
case PUBLIC:
publicLocalizer.addResource(req);
break;
case PRIVATE:
case APPLICATION:
synchronized (privLocalizers) {
LocalizerRunner localizer = privLocalizers.get(locId);
if (localizer != null && localizer.killContainerLocalizer.get()) {
// Old localizer thread has been stopped, remove it and creates
// a new localizer thread.
LOG.info("New " + event.getType() + " localize request for "
+ locId + ", remove old private localizer.");
cleanupPrivLocalizers(locId);
localizer = null;
}
if (null == localizer) {
LOG.info("Created localizer for " + locId);
localizer = new LocalizerRunner(req.getContext(), locId);
privLocalizers.put(locId, localizer);
localizer.start();
}
// 1) propagate event
localizer.addResource(req);
}
break;
}
break;
}
}
public void cleanupPrivLocalizers(String locId) {
synchronized (privLocalizers) {
LocalizerRunner localizer = privLocalizers.get(locId);
if (null == localizer) {
return; // ignore; already gone
}
privLocalizers.remove(locId);
localizer.interrupt();
}
}
public void endContainerLocalization(String locId) {
LocalizerRunner localizer;
synchronized (privLocalizers) {
localizer = privLocalizers.get(locId);
if (null == localizer) {
return; // ignore
}
}
localizer.endContainerLocalization();
}
}
/**
 * Builds the fixed-size thread pool used for public-resource downloads.
 * The pool size is read from
 * {@link YarnConfiguration#NM_LOCALIZER_FETCH_THREAD_COUNT}.
 */
private static ExecutorService createLocalizerExecutor(Configuration conf) {
  int poolSize = conf.getInt(
      YarnConfiguration.NM_LOCALIZER_FETCH_THREAD_COUNT,
      YarnConfiguration.DEFAULT_NM_LOCALIZER_FETCH_THREAD_COUNT);
  ThreadFactory namedFactory = new ThreadFactoryBuilder()
      .setNameFormat("PublicLocalizer #%d")
      .build();
  return HadoopExecutors.newFixedThreadPool(poolSize, namedFactory);
}
/**
 * Downloader thread for PUBLIC-visibility resources. Submissions go through
 * a shared {@link ExecutorService}; this thread drains the completion queue
 * and publishes success/failure events to {@code publicRsrc}.
 */
class PublicLocalizer extends Thread {
  final FileContext lfs;
  final Configuration conf;
  final ExecutorService threadPool;
  // Completion queue for FSDownload futures submitted to threadPool.
  final CompletionService<Path> queue;
  // Its shared between public localizer and dispatcher thread.
  final Map<Future<Path>,LocalizerResourceRequestEvent> pending;

  PublicLocalizer(Configuration conf) {
    super("Public Localizer");
    this.lfs = getLocalFileContext(conf);
    this.conf = conf;
    this.pending = Collections.synchronizedMap(
        new HashMap<Future<Path>, LocalizerResourceRequestEvent>());
    this.threadPool = createLocalizerExecutor(conf);
    this.queue = new ExecutorCompletionService<Path>(threadPool);
  }

  /**
   * Submits a download for the requested public resource if this thread
   * wins the race for it; otherwise the request is skipped (another
   * requester is already downloading it, or it is already resolved).
   * On any submission failure the resource lock is released and a
   * failed-localization event is published.
   */
  public void addResource(LocalizerResourceRequestEvent request) {
    // TODO handle failures, cancellation, requests by other containers
    LocalizedResource rsrc = request.getResource();
    LocalResourceRequest key = rsrc.getRequest();
    LOG.info("Downloading public resource: " + key);
    /*
     * Here multiple containers may request the same resource. So we need
     * to start downloading only when
     * 1) ResourceState == DOWNLOADING
     * 2) We are able to acquire non blocking semaphore lock.
     * If not we will skip this resource as either it is getting downloaded
     * or it FAILED / LOCALIZED.
     */
    if (rsrc.tryAcquire()) {
      if (rsrc.getState() == ResourceState.DOWNLOADING) {
        LocalResource resource = request.getResource().getRequest();
        try {
          Path publicRootPath =
              dirsHandler.getLocalPathForWrite("." + Path.SEPARATOR
                  + ContainerLocalizer.FILECACHE,
                ContainerLocalizer.getEstimatedSize(resource), true);
          Path publicDirDestPath =
              publicRsrc.getPathForLocalization(key, publicRootPath,
                  delService);
          if (!publicDirDestPath.getParent().equals(publicRootPath)) {
            // Destination is nested under the cache root: create the
            // intermediate dirs and sanity-check the disk before use.
            createParentDirs(publicDirDestPath, publicRootPath);
            if (diskValidator != null) {
              diskValidator.checkStatus(
                  new File(publicDirDestPath.toUri().getPath()));
            } else {
              throw new DiskChecker.DiskErrorException(
                  "Disk Validator is null!");
            }
          }
          // explicitly synchronize pending here to avoid future task
          // completing and being dequeued before pending updated
          synchronized (pending) {
            pending.put(queue.submit(new FSDownload(lfs, null, conf,
                publicDirDestPath, resource, request.getContext().getStatCache())),
                request);
          }
        } catch (IOException e) {
          rsrc.unlock();
          publicRsrc.handle(new ResourceFailedLocalizationEvent(request
            .getResource().getRequest(), e.getMessage()));
          LOG.error("Local path for public localization is not found. "
              + " May be disks failed.", e);
        } catch (IllegalArgumentException ie) {
          rsrc.unlock();
          publicRsrc.handle(new ResourceFailedLocalizationEvent(request
              .getResource().getRequest(), ie.getMessage()));
          LOG.error("Local path for public localization is not found. "
              + " Incorrect path. " + request.getResource().getRequest()
              .getPath(), ie);
        } catch (RejectedExecutionException re) {
          rsrc.unlock();
          publicRsrc.handle(new ResourceFailedLocalizationEvent(request
            .getResource().getRequest(), re.getMessage()));
          LOG.error("Failed to submit rsrc " + rsrc + " for download."
              + " Either queue is full or threadpool is shutdown.", re);
        }
      } else {
        // Lost the race: someone else moved it past DOWNLOADING.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Skip downloading resource: " + key + " since it's in"
              + " state: " + rsrc.getState());
        }
        rsrc.unlock();
      }
    } else {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Skip downloading resource: " + key + " since it is locked"
            + " by other threads");
      }
    }
  }

  // Recursively creates every directory between destDirRoot (exclusive)
  // and destDirPath (inclusive) with the public file-cache permissions.
  private void createParentDirs(Path destDirPath, Path destDirRoot)
      throws IOException {
    if (destDirPath == null || destDirPath.equals(destDirRoot)) {
      return;
    }
    createParentDirs(destDirPath.getParent(), destDirRoot);
    createDir(destDirPath, PUBLIC_FILECACHE_FOLDER_PERMS);
  }

  // Creates a single dir; re-applies perms if the umask weakened them.
  private void createDir(Path dirPath, FsPermission perms)
      throws IOException {
    lfs.mkdir(dirPath, perms, false);
    if (!perms.equals(perms.applyUMask(lfs.getUMask()))) {
      lfs.setPermission(dirPath, perms);
    }
  }

  @Override
  public void run() {
    try {
      // TODO shutdown, better error handling esp. DU
      while (!Thread.currentThread().isInterrupted()) {
        try {
          Future<Path> completed = queue.take();
          LocalizerResourceRequestEvent assoc = pending.remove(completed);
          try {
            if (null == assoc) {
              // NOTE: this return exits the whole public localizer thread,
              // not just this iteration.
              LOG.error("Localized unknown resource to " + completed);
              // TODO delete
              return;
            }
            Path local = completed.get();
            LocalResourceRequest key = assoc.getResource().getRequest();
            publicRsrc.handle(new ResourceLocalizedEvent(key, local, FileUtil
              .getDU(new File(local.toUri()))));
            assoc.getResource().unlock();
          } catch (ExecutionException e) {
            // Download failed: notify both the requesting app's tracker and
            // the public tracker, then release the resource lock.
            String user = assoc.getContext().getUser();
            ApplicationId applicationId = assoc.getContext().getContainerId().getApplicationAttemptId().getApplicationId();
            LocalResourcesTracker tracker =
              getLocalResourcesTracker(LocalResourceVisibility.APPLICATION, user, applicationId);
            final String diagnostics = "Failed to download resource " +
                assoc.getResource() + " " + e.getCause();
            tracker.handle(new ResourceFailedLocalizationEvent(
                assoc.getResource().getRequest(), diagnostics));
            publicRsrc.handle(new ResourceFailedLocalizationEvent(
                assoc.getResource().getRequest(), diagnostics));
            LOG.error(diagnostics);
            assoc.getResource().unlock();
          } catch (CancellationException e) {
            // ignore; shutting down
          }
        } catch (InterruptedException e) {
          return;
        }
      }
    } catch(Throwable t) {
      LOG.error("Error: Shutting down", t);
    } finally {
      LOG.info("Public cache exiting");
      threadPool.shutdownNow();
    }
  }
}
/**
 * Runs the {@link ContainerLocalizer} itself in a separate process with
 * access to user's credentials. One {@link LocalizerRunner} per localizerId.
 * Resources are handed out one at a time via heartbeat responses; on exit
 * any partially downloaded resources are cleaned up.
 */
class LocalizerRunner extends Thread {

  final LocalizerContext context;
  final String localizerId;
  // Resources currently handed to the spawned localizer, keyed by request.
  final Map<LocalResourceRequest,LocalizerResourceRequestEvent> scheduled;
  // Its a shared list between Private Localizer and dispatcher thread.
  final List<LocalizerResourceRequestEvent> pending;
  // Set by endContainerLocalization(); makes the next heartbeat answer DIE.
  private AtomicBoolean killContainerLocalizer = new AtomicBoolean(false);

  // TODO: threadsafe, use outer?
  private final RecordFactory recordFactory =
    RecordFactoryProvider.getRecordFactory(getConfig());

  LocalizerRunner(LocalizerContext context, String localizerId) {
    super("LocalizerRunner for " + localizerId);
    this.context = context;
    this.localizerId = localizerId;
    this.pending =
        Collections
          .synchronizedList(new ArrayList<LocalizerResourceRequestEvent>());
    this.scheduled =
        new HashMap<LocalResourceRequest, LocalizerResourceRequestEvent>();
  }

  /** Queues a resource request for this localizer (called by dispatcher). */
  public void addResource(LocalizerResourceRequestEvent request) {
    pending.add(request);
  }

  /** Requests that the spawned localizer be told to DIE on next heartbeat. */
  public void endContainerLocalization() {
    killContainerLocalizer.set(true);
  }

  /**
   * Find next resource to be given to a spawned localizer.
   *
   * @return the next resource to be localized
   */
  private LocalResource findNextResource() {
    synchronized (pending) {
      for (Iterator<LocalizerResourceRequestEvent> i = pending.iterator();
           i.hasNext();) {
        LocalizerResourceRequestEvent evt = i.next();
        LocalizedResource nRsrc = evt.getResource();
        // Resource download should take place ONLY if resource is in
        // Downloading state
        if (nRsrc.getState() != ResourceState.DOWNLOADING) {
          i.remove();
          continue;
        }
        /*
         * Multiple containers will try to download the same resource. So the
         * resource download should start only if
         * 1) We can acquire a non blocking semaphore lock on resource
         * 2) Resource is still in DOWNLOADING state
         */
        if (nRsrc.tryAcquire()) {
          if (nRsrc.getState() == ResourceState.DOWNLOADING) {
            LocalResourceRequest nextRsrc = nRsrc.getRequest();
            LocalResource next =
                recordFactory.newRecordInstance(LocalResource.class);
            next.setResource(URL.fromPath(nextRsrc
              .getPath()));
            next.setTimestamp(nextRsrc.getTimestamp());
            next.setType(nextRsrc.getType());
            next.setVisibility(evt.getVisibility());
            next.setPattern(evt.getPattern());
            // Keep the lock; it is released when the heartbeat reports
            // FETCH_SUCCESS/FETCH_FAILURE, or in run()'s cleanup.
            scheduled.put(nextRsrc, evt);
            return next;
          } else {
            // Need to release acquired lock
            nRsrc.unlock();
          }
        }
      }
      return null;
    }
  }

  /**
   * Processes one heartbeat from the spawned localizer: records the status
   * of every previously handed-out resource, then either orders DIE (on a
   * fetch failure or kill request) or hands out the next resource to fetch.
   */
  LocalizerHeartbeatResponse processHeartbeat(
      List<LocalResourceStatus> remoteResourceStatuses) {
    LocalizerHeartbeatResponse response =
      recordFactory.newRecordInstance(LocalizerHeartbeatResponse.class);
    String user = context.getUser();
    ApplicationId applicationId =
        context.getContainerId().getApplicationAttemptId().getApplicationId();

    boolean fetchFailed = false;
    // Update resource statuses.
    for (LocalResourceStatus stat : remoteResourceStatuses) {
      LocalResource rsrc = stat.getResource();
      LocalResourceRequest req = null;
      try {
        req = new LocalResourceRequest(rsrc);
      } catch (URISyntaxException e) {
        LOG.error(
            "Got exception in parsing URL of LocalResource:"
                + rsrc.getResource(), e);
        continue;
      }
      LocalizerResourceRequestEvent assoc = scheduled.get(req);
      if (assoc == null) {
        // internal error
        LOG.error("Unknown resource reported: " + req);
        continue;
      }
      LocalResourcesTracker tracker =
          getLocalResourcesTracker(req.getVisibility(), user, applicationId);
      if (tracker == null) {
        // This is likely due to a race between heartbeat and
        // app cleaning up.
        continue;
      }
      switch (stat.getStatus()) {
        case FETCH_SUCCESS:
          // notify resource
          try {
            tracker.handle(new ResourceLocalizedEvent(req,
                stat.getLocalPath().toPath(), stat.getLocalSize()));
          } catch (URISyntaxException e) { }

          // unlocking the resource and removing it from scheduled resource
          // list
          assoc.getResource().unlock();
          scheduled.remove(req);
          break;
        case FETCH_PENDING:
          break;
        case FETCH_FAILURE:
          final String diagnostics = stat.getException().toString();
          LOG.warn(req + " failed: " + diagnostics);
          fetchFailed = true;
          tracker.handle(new ResourceFailedLocalizationEvent(req,
              diagnostics));

          // unlocking the resource and removing it from scheduled resource
          // list
          assoc.getResource().unlock();
          scheduled.remove(req);
          break;
        default:
          LOG.info("Unknown status: " + stat.getStatus());
          fetchFailed = true;
          tracker.handle(new ResourceFailedLocalizationEvent(req,
              stat.getException().getMessage()));
          break;
      }
    }
    if (fetchFailed || killContainerLocalizer.get()) {
      response.setLocalizerAction(LocalizerAction.DIE);
      return response;
    }

    // Give the localizer resources for remote-fetching.
    List<ResourceLocalizationSpec> rsrcs =
        new ArrayList<ResourceLocalizationSpec>();

    /*
     * TODO : It doesn't support multiple downloads per ContainerLocalizer
     * at the same time. We need to think whether we should support this.
     */
    LocalResource next = findNextResource();
    if (next != null) {
      try {
        LocalResourcesTracker tracker = getLocalResourcesTracker(
            next.getVisibility(), user, applicationId);
        if (tracker != null) {
          ResourceLocalizationSpec resource =
              NodeManagerBuilderUtils.newResourceLocalizationSpec(next,
                getPathForLocalization(next, tracker));
          rsrcs.add(resource);
        }
      } catch (IOException e) {
        LOG.error("local path for PRIVATE localization could not be " +
          "found. Disks might have failed.", e);
      } catch (IllegalArgumentException e) {
        LOG.error("Incorrect path for PRIVATE localization."
            + next.getResource().getFile(), e);
      } catch (URISyntaxException e) {
        LOG.error(
            "Got exception in parsing URL of LocalResource:"
                + next.getResource(), e);
      }
    }

    response.setLocalizerAction(LocalizerAction.LIVE);
    response.setResourceSpecs(rsrcs);
    return response;
  }

  // Picks the destination path for a PRIVATE (per-user) or APPLICATION
  // (per-app) resource under the appropriate cache directory.
  private Path getPathForLocalization(LocalResource rsrc,
      LocalResourcesTracker tracker) throws IOException, URISyntaxException {
    String user = context.getUser();
    ApplicationId appId =
        context.getContainerId().getApplicationAttemptId().getApplicationId();
    LocalResourceVisibility vis = rsrc.getVisibility();
    String cacheDirectory = null;
    if (vis == LocalResourceVisibility.PRIVATE) {// PRIVATE Only
      cacheDirectory = getUserFileCachePath(user);
    } else {// APPLICATION ONLY
      cacheDirectory = getAppFileCachePath(user, appId.toString());
    }
    Path dirPath =
        dirsHandler.getLocalPathForWrite(cacheDirectory,
          ContainerLocalizer.getEstimatedSize(rsrc), false);
    return tracker.getPathForLocalization(new LocalResourceRequest(rsrc),
        dirPath, delService);
  }

  @Override
  @SuppressWarnings("unchecked") // dispatcher not typed
  public void run() {
    Path nmPrivateCTokensPath = null;
    Throwable exception = null;
    try {
      // Get nmPrivateDir
      nmPrivateCTokensPath =
        dirsHandler.getLocalPathForWrite(
                NM_PRIVATE_DIR + Path.SEPARATOR
                    + String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT,
                        localizerId));

      // 0) init queue, etc.
      // 1) write credentials to private dir
      writeCredentials(nmPrivateCTokensPath);
      // 2) exec initApplication and wait
      if (dirsHandler.areDisksHealthy()) {
        exec.startLocalizer(new LocalizerStartContext.Builder()
            .setNmPrivateContainerTokens(nmPrivateCTokensPath)
            .setNmAddr(localizationServerAddress)
            .setUser(context.getUser())
            .setAppId(context.getContainerId()
                .getApplicationAttemptId().getApplicationId().toString())
            .setLocId(localizerId)
            .setDirsHandler(dirsHandler)
            .build());
      } else {
        throw new IOException("All disks failed. "
            + dirsHandler.getDisksHealthReport(false));
      }
    // TODO handle ExitCodeException separately?
    } catch (FSError fe) {
      exception = fe;
    } catch (Exception e) {
      exception = e;
    } finally {
      if (exception != null) {
        LOG.info("Localizer failed for "+localizerId, exception);
        // On error, report failure to Container and signal ABORT
        // Notify resource of failed localization
        ContainerId cId = context.getContainerId();
        dispatcher.getEventHandler().handle(new ContainerResourceFailedEvent(
            cId, null, exception.getMessage()));
      }
      // Clean up anything still mid-download: delete the localization dir
      // and the "_tmp" dir used while downloading, and release the locks.
      List<Path> paths = new ArrayList<Path>();
      for (LocalizerResourceRequestEvent event : scheduled.values()) {
        // This means some resources were in downloading state. Schedule
        // deletion task for localization dir and tmp dir used for downloading
        Path locRsrcPath = event.getResource().getLocalPath();
        if (locRsrcPath != null) {
          Path locRsrcDirPath = locRsrcPath.getParent();
          paths.add(locRsrcDirPath);
          paths.add(new Path(locRsrcDirPath + "_tmp"));
        }
        event.getResource().unlock();
      }
      if (!paths.isEmpty()) {
        FileDeletionTask deletionTask = new FileDeletionTask(delService,
            context.getUser(), null, paths);
        delService.delete(deletionTask);
      }
      // The token file was written as the NM user, so delete it as such.
      FileDeletionTask deletionTask = new FileDeletionTask(delService, null,
          nmPrivateCTokensPath, null);
      delService.delete(deletionTask);
    }
  }

  // Fetches the RM-pushed credentials for this localizer's application, or
  // null when none were sent.
  private Credentials getSystemCredentialsSentFromRM(
      LocalizerContext localizerContext) throws IOException {
    ApplicationId appId =
        localizerContext.getContainerId().getApplicationAttemptId()
          .getApplicationId();
    Credentials systemCredentials =
        nmContext.getSystemCredentialsForApps().get(appId);
    if (systemCredentials == null) {
      return null;
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Adding new framework-token for " + appId
          + " for localization: " + systemCredentials.getAllTokens());
    }
    return systemCredentials;
  }

  // Writes the credentials the spawned localizer will use (RM-pushed
  // credentials when security is on, plus a localizer token) to the
  // NM-private token file.
  private void writeCredentials(Path nmPrivateCTokensPath)
      throws IOException {
    DataOutputStream tokenOut = null;
    try {
      Credentials credentials = context.getCredentials();
      if (UserGroupInformation.isSecurityEnabled()) {
        Credentials systemCredentials =
            getSystemCredentialsSentFromRM(context);
        if (systemCredentials != null) {
          credentials = systemCredentials;
        }
      }

      FileContext lfs = getLocalFileContext(getConfig());
      tokenOut =
          lfs.create(nmPrivateCTokensPath, EnumSet.of(CREATE, OVERWRITE));
      LOG.info("Writing credentials to the nmPrivate file "
          + nmPrivateCTokensPath.toString());
      if (LOG.isDebugEnabled()) {
        LOG.debug("Credentials list in " + nmPrivateCTokensPath.toString()
            + ": ");
        for (Token<? extends TokenIdentifier> tk : credentials
            .getAllTokens()) {
          LOG.debug(tk + " : " + buildTokenFingerprint(tk));
        }
      }
      if (UserGroupInformation.isSecurityEnabled()) {
        // Copy before mutating: the original credentials object may be
        // shared with other components.
        credentials = new Credentials(credentials);
        LocalizerTokenIdentifier id = secretManager.createIdentifier();
        Token<LocalizerTokenIdentifier> localizerToken =
            new Token<LocalizerTokenIdentifier>(id, secretManager);
        credentials.addToken(id.getKind(), localizerToken);
      }
      credentials.writeTokenStorageToStream(tokenOut);
    } finally {
      if (tokenOut != null) {
        tokenOut.close();
      }
    }
  }
}
/**
 * Returns a fingerprint of a token that is safe to log: it is built from
 * the first 10 bytes of a SHA-256 hash of the token's string encoding, so
 * it cannot be used to recover the secret. The result is the hex form of
 * each byte (two characters), delimited by single spaces.
 *
 * @param tk token
 * @return token fingerprint
 * @throws IOException if there is an I/O error
 */
@VisibleForTesting
static String buildTokenFingerprint(Token<? extends TokenIdentifier> tk)
    throws IOException {
  String hexDigest = DigestUtils.sha256Hex(tk.encodeToUrlString());
  StringBuilder fingerprint = new StringBuilder();
  // First 10 bytes == first 20 hex characters, taken two at a time.
  for (int pos = 0; pos < 20; pos += 2) {
    if (pos > 0) {
      fingerprint.append(' ');
    }
    fingerprint.append(hexDigest, pos, pos + 2);
  }
  return fingerprint.toString();
}
/**
 * One-shot thread that fires a CACHE_CLEANUP localization event on the
 * dispatcher and exits.
 */
static class CacheCleanup extends Thread {

  private final Dispatcher dispatcher;

  public CacheCleanup(Dispatcher dispatcher) {
    super("CacheCleanup");
    this.dispatcher = dispatcher;
  }

  @Override
  @SuppressWarnings("unchecked") // dispatcher not typed
  public void run() {
    LocalizationEvent cleanupEvent =
        new LocalizationEvent(LocalizationEventType.CACHE_CLEANUP);
    dispatcher.getEventHandler().handle(cleanupEvent);
  }
}
/** Initializes every configured local dir (usercache/filecache/nmPrivate). */
private void initializeLocalDirs(FileContext lfs) {
  for (String dir : dirsHandler.getLocalDirs()) {
    initializeLocalDir(lfs, dir);
  }
}
/**
 * Ensures each sub-directory this service owns under {@code localDir}
 * exists with the expected permissions, creating it and/or re-applying
 * permissions as needed. Any unexpected I/O failure is fatal.
 */
private void initializeLocalDir(FileContext lfs, String localDir) {
  Map<Path, FsPermission> ownedPaths =
      getLocalDirsPathPermissionsMap(localDir);
  for (Map.Entry<Path, FsPermission> owned : ownedPaths.entrySet()) {
    Path path = owned.getKey();
    FsPermission expected = owned.getValue();
    FileStatus status;
    try {
      status = lfs.getFileStatus(path);
    } catch (FileNotFoundException notFound) {
      // Missing is fine; created below.
      status = null;
    } catch (IOException ioe) {
      String msg = "Could not get file status for local dir " + path;
      LOG.warn(msg, ioe);
      throw new YarnRuntimeException(msg, ioe);
    }
    if (status == null) {
      try {
        lfs.mkdir(path, expected, true);
        status = lfs.getFileStatus(path);
      } catch (IOException ioe) {
        String msg = "Could not initialize local dir " + path;
        LOG.warn(msg, ioe);
        throw new YarnRuntimeException(msg, ioe);
      }
    }
    if (!status.getPermission().equals(expected)) {
      // Directory exists but with wrong perms (e.g. umask applied): fix.
      try {
        lfs.setPermission(path, expected);
      } catch (IOException ioe) {
        String msg = "Could not set permissions for local dir " + path;
        LOG.warn(msg, ioe);
        throw new YarnRuntimeException(msg, ioe);
      }
    }
  }
}
/** Creates every configured log directory if it does not already exist. */
private void initializeLogDirs(FileContext lfs) {
  for (String dir : dirsHandler.getLogDirs()) {
    initializeLogDir(lfs, dir);
  }
}
/**
 * Creates a single log directory (recursively, default permissions).
 * An already-existing directory is not an error; any other I/O failure is
 * fatal for the service.
 */
private void initializeLogDir(FileContext fs, String logDir) {
  try {
    fs.mkdir(new Path(logDir), null, true);
  } catch (FileAlreadyExistsException alreadyExists) {
    // Already initialized; nothing to do.
  } catch (IOException e) {
    String msg = "Could not initialize log dir " + logDir;
    LOG.warn(msg, e);
    throw new YarnRuntimeException(msg, e);
  }
}
/**
 * Best-effort cleanup of stale application log dirs across all configured
 * log directories; a failure on one dir is logged and does not stop the
 * others.
 */
private void cleanupLogDirs(FileContext fs, DeletionService del) {
  for (String dir : dirsHandler.getLogDirsForCleanup()) {
    try {
      cleanupLogDir(fs, del, dir);
    } catch (IOException e) {
      LOG.warn("failed to cleanup app log dir " + dir, e);
    }
  }
}
/**
 * Cleans one log directory: first renames leftover application dirs to a
 * "_DEL_<timestamp>" name, then schedules deletion of all renamed dirs.
 * No-op when the directory does not exist.
 */
private void cleanupLogDir(FileContext fs, DeletionService del,
    String logDir) throws IOException {
  Path logPath = new Path(logDir);
  if (!fs.util().exists(logPath)) {
    return;
  }
  renameAppLogDir(logDir);
  deleteAppLogDir(fs, del, logDir);
}
/**
 * Marks leftover per-application log dirs for deletion by renaming
 * {@code application_<cluster>_<id>} to the same name suffixed with
 * {@code _DEL_<now-millis>}.
 */
private void renameAppLogDir(String logDir) throws IOException {
  long deletionTimestamp = System.currentTimeMillis();
  RemoteIterator<FileStatus> entries = lfs.listStatus(new Path(logDir));
  if (entries == null) {
    return;
  }
  while (entries.hasNext()) {
    String appDirName = entries.next().getPath().getName();
    if (appDirName.matches("^application_\\d+_\\d+$")) {
      lfs.rename(new Path(logDir, appDirName),
          new Path(logDir, appDirName + "_DEL_" + deletionTimestamp));
    }
  }
}
/**
 * Schedules deletion of every app log dir previously marked with a
 * {@code _DEL_<timestamp>} suffix by {@code renameAppLogDir}.
 */
private void deleteAppLogDir(FileContext fs, DeletionService del,
    String logDir) throws IOException {
  RemoteIterator<FileStatus> entries = fs.listStatus(new Path(logDir));
  if (entries == null) {
    return;
  }
  while (entries.hasNext()) {
    FileStatus entry = entries.next();
    String name = entry.getPath().getName();
    if (name.matches("^application_\\d+_\\d+_DEL_\\d+$")) {
      LOG.info("delete app log dir," + name);
      FileDeletionTask deletionTask =
          new FileDeletionTask(del, null, entry.getPath(), null);
      del.delete(deletionTask);
    }
  }
}
/** Runs the rename-then-delete cleanup on every local dir needing it. */
private void cleanUpLocalDirs(FileContext lfs, DeletionService del) {
  for (String dir : dirsHandler.getLocalDirsForCleanup()) {
    cleanUpLocalDir(lfs, del, dir);
  }
}
/**
 * Cleans one local dir: renames the usercache, filecache and nmPrivate
 * sub-dirs to "_DEL_<timestamp>" names, then schedules their deletion.
 * Failures are logged and swallowed so other dirs still get cleaned.
 */
private void cleanUpLocalDir(FileContext lfs, DeletionService del,
    String localDir) {
  long currentTimeStamp = System.currentTimeMillis();
  renameLocalDir(lfs, localDir, ContainerLocalizer.USERCACHE,
      currentTimeStamp);
  renameLocalDir(lfs, localDir, ContainerLocalizer.FILECACHE,
      currentTimeStamp);
  renameLocalDir(lfs, localDir, ResourceLocalizationService.NM_PRIVATE_DIR,
      currentTimeStamp);
  try {
    deleteLocalDir(lfs, del, localDir);
  } catch (IOException e) {
    // Best-effort cleanup: keep going, but log the cause instead of
    // discarding it (original dropped the exception from the log call).
    LOG.warn("Failed to delete localDir: " + localDir, e);
  }
}
/**
 * Renames {@code localDir/localSubDir} to
 * {@code localDir/localSubDir_DEL_<currentTimeStamp>} so it can be deleted
 * asynchronously. A missing sub-dir is silently ignored; any other failure
 * is logged (with its cause) and swallowed — cleanup is best-effort.
 */
private void renameLocalDir(FileContext lfs, String localDir,
    String localSubDir, long currentTimeStamp) {
  try {
    lfs.rename(new Path(localDir, localSubDir), new Path(
        localDir, localSubDir + "_DEL_" + currentTimeStamp));
  } catch (FileNotFoundException ex) {
    // No need to handle this exception
    // localSubDir may not exist
  } catch (Exception ex) {
    // Log the cause (original dropped it) but keep the best-effort
    // semantics and continue.
    LOG.warn("Failed to rename the local file under " +
        localDir + "/" + localSubDir, ex);
  }
}
/**
 * Schedules deletion of every "_DEL_"-marked entry inside {@code localDir}:
 * usercache entries get per-owner cleanup via
 * {@code cleanUpFilesPerUserDir}; nmPrivate and filecache entries are
 * deleted directly as the NM user. Failures on one entry are logged (with
 * the cause) and do not stop the remaining entries.
 */
private void deleteLocalDir(FileContext lfs, DeletionService del,
    String localDir) throws IOException {
  RemoteIterator<FileStatus> fileStatus = lfs.listStatus(new Path(localDir));
  if (fileStatus != null) {
    while (fileStatus.hasNext()) {
      FileStatus status = fileStatus.next();
      try {
        if (status.getPath().getName().matches(".*" +
            ContainerLocalizer.USERCACHE + "_DEL_.*")) {
          LOG.info("usercache path : " + status.getPath().toString());
          cleanUpFilesPerUserDir(lfs, del, status.getPath());
        } else if (status.getPath().getName()
            .matches(".*" + NM_PRIVATE_DIR + "_DEL_.*")
            ||
            status.getPath().getName()
                .matches(".*" + ContainerLocalizer.FILECACHE + "_DEL_.*")) {
          FileDeletionTask deletionTask = new FileDeletionTask(del, null,
              status.getPath(), null);
          del.delete(deletionTask);
        }
      } catch (IOException ex) {
        // Best-effort: continue with the other entries, but include the
        // cause in the log (original discarded it).
        LOG.warn("Failed to delete this local Directory: " +
            status.getPath().getName(), ex);
      }
    }
  }
}
/**
 * Deletes a usercache dir whose per-user contents may be owned by
 * different users: each child is scheduled for deletion as its owner, and
 * the parent dir itself is removed (as the NM user) once every child task
 * has completed. An empty dir is removed immediately.
 */
private void cleanUpFilesPerUserDir(FileContext lfs, DeletionService del,
    Path userDirPath) throws IOException {
  RemoteIterator<FileStatus> userDirEntries = lfs.listStatus(userDirPath);
  FileDeletionTask parentDirTask = new FileDeletionTask(del, null,
      userDirPath, new ArrayList<Path>());
  if (userDirEntries == null || !userDirEntries.hasNext()) {
    // Nothing inside; just remove the directory itself.
    del.delete(parentDirTask);
    return;
  }
  List<FileDeletionTask> perOwnerTasks = new ArrayList<FileDeletionTask>();
  while (userDirEntries.hasNext()) {
    FileStatus entry = userDirEntries.next();
    List<Path> ownedPaths = new ArrayList<>();
    ownedPaths.add(entry.getPath());
    FileDeletionTask ownerTask = new FileDeletionTask(del,
        entry.getOwner(), null, ownedPaths);
    // Parent-dir removal waits on this child task.
    ownerTask.addDeletionTaskDependency(parentDirTask);
    perOwnerTasks.add(ownerTask);
  }
  for (FileDeletionTask task : perOwnerTasks) {
    del.delete(task);
  }
}
/**
 * Check each local dir to ensure it has been setup correctly and will
 * attempt to fix any issues it finds. A dir that still fails its check
 * after re-initialization aborts the service with a
 * {@link YarnRuntimeException}.
 */
@VisibleForTesting
void checkAndInitializeLocalDirs() {
  List<String> dirs = dirsHandler.getLocalDirs();
  List<String> checkFailedDirs = new ArrayList<String>();
  // First pass: collect dirs whose layout/permissions check fails.
  for (String dir : dirs) {
    try {
      checkLocalDir(dir);
    } catch (YarnRuntimeException e) {
      checkFailedDirs.add(dir);
    }
  }
  // Second pass: re-initialize the failed dirs and re-check; a repeat
  // failure is fatal.
  for (String dir : checkFailedDirs) {
    LOG.info("Attempting to initialize " + dir);
    initializeLocalDir(lfs, dir);
    try {
      checkLocalDir(dir);
    } catch (YarnRuntimeException e) {
      String msg =
          "Failed to setup local dir " + dir + ", which was marked as good.";
      LOG.warn(msg, e);
      throw new YarnRuntimeException(msg, e);
    }
  }
}
/**
 * Verifies that every sub-dir this service owns under {@code localDir}
 * exists with exactly the expected permissions. Throws
 * {@link YarnRuntimeException} on any stat failure or mismatch; returns
 * {@code true} otherwise.
 */
private boolean checkLocalDir(String localDir) {
  Map<Path, FsPermission> expectedLayout =
      getLocalDirsPathPermissionsMap(localDir);
  for (Map.Entry<Path, FsPermission> expected : expectedLayout.entrySet()) {
    Path path = expected.getKey();
    FsPermission wantedPerms = expected.getValue();
    FileStatus status;
    try {
      status = lfs.getFileStatus(path);
    } catch (Exception e) {
      String msg =
          "Could not carry out resource dir checks for " + localDir
              + ", which was marked as good";
      LOG.warn(msg, e);
      throw new YarnRuntimeException(msg, e);
    }
    if (!status.getPermission().equals(wantedPerms)) {
      String msg =
          "Permissions incorrectly set for dir " + path
              + ", should be " + wantedPerms + ", actual value = "
              + status.getPermission();
      LOG.warn(msg);
      throw new YarnRuntimeException(msg);
    }
  }
  return true;
}
/**
 * Returns the sub-dirs this service owns under {@code localDir} with the
 * permissions each must have after the NM umask is applied: usercache and
 * filecache get the default dir permission, nmPrivate the restricted one.
 */
private Map<Path, FsPermission> getLocalDirsPathPermissionsMap(String localDir) {
  FsPermission defaultPermission =
      FsPermission.getDirDefault().applyUMask(lfs.getUMask());
  FsPermission nmPrivatePermission =
      NM_PRIVATE_PERM.applyUMask(lfs.getUMask());
  Map<Path, FsPermission> localDirPathFsPermissionsMap =
      new HashMap<Path, FsPermission>();
  localDirPathFsPermissionsMap.put(
      new Path(localDir, ContainerLocalizer.USERCACHE), defaultPermission);
  localDirPathFsPermissionsMap.put(
      new Path(localDir, ContainerLocalizer.FILECACHE), defaultPermission);
  localDirPathFsPermissionsMap.put(
      new Path(localDir, NM_PRIVATE_DIR), nmPrivatePermission);
  return localDirPathFsPermissionsMap;
}
}
|
|
package com.orientechnologies.orient.core.storage.cache.local;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import java.util.zip.CRC32;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.orientechnologies.common.directmemory.ODirectMemoryPointer;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.common.serialization.types.OLongSerializer;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.config.OStorageSegmentConfiguration;
import com.orientechnologies.orient.core.exception.OAllCacheEntriesAreUsedException;
import com.orientechnologies.orient.core.storage.cache.OCacheEntry;
import com.orientechnologies.orient.core.storage.cache.OCachePointer;
import com.orientechnologies.orient.core.storage.cache.OPageDataVerificationError;
import com.orientechnologies.orient.core.storage.cache.OWriteCache;
import com.orientechnologies.orient.core.storage.fs.OFileClassic;
import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.base.ODurablePage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.ODiskWriteAheadLog;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OLogSequenceNumber;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OWALRecordsFactory;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.WriteAheadLogTest;
@Test
public class ReadWriteDiskCacheTest {
// Bytes reserved at the head of each page before user data: one int plus
// three longs (presumably CRC/magic/LSN bookkeeping — TODO confirm).
private int systemOffset = OIntegerSerializer.INT_SIZE + 3 * OLongSerializer.LONG_SIZE;
// 2Q read cache under test.
private O2QCache readBuffer;
// Write-behind cache under test.
private OWriteCache writeBuffer;
// Backing paginated storage created in beforeClass().
private OLocalPaginatedStorage storageLocal;
// Name of the on-disk file exercised by the tests.
private String fileName;
// Random byte mixed into every written page so each run writes distinct data.
private byte seed;
private ODiskWriteAheadLog writeAheadLog;
/**
 * One-time setup: disables file locking, creates (then closes) the backing
 * plocal storage under the build directory, and registers the WAL test
 * record type.
 */
@BeforeClass
public void beforeClass() throws IOException {
  OGlobalConfiguration.FILE_LOCK.setValue(Boolean.FALSE);

  // Fall back to the working directory when the build system did not set
  // the property (same effect as the original explicit null check).
  String buildDirectory = System.getProperty("buildDirectory", ".");

  storageLocal = (OLocalPaginatedStorage) Orient.instance()
      .loadStorage("plocal:" + buildDirectory + "/ReadWriteDiskCacheTest");
  storageLocal.create(null);
  storageLocal.close(true, false);

  fileName = "readWriteDiskCacheTest.tst";

  OWALRecordsFactory.INSTANCE.registerNewRecord((byte) 128, WriteAheadLogTest.TestRecord.class);
}
/** Per-test setup: fresh caches plus a new random seed byte. */
@BeforeMethod
public void beforeMethod() throws IOException {
  closeBufferAndDeleteFile();
  initBuffer();
  // Casting to byte keeps the low 8 bits, identical to (nextInt() & 0xFF).
  seed = (byte) new Random().nextInt();
}
/**
 * Tears down the caches and WAL from the previous test and removes the
 * test data file and the name/id map so the next test starts clean.
 */
private void closeBufferAndDeleteFile() throws IOException {
  if (writeBuffer != null) {
    // Prefer closing through the read cache so both layers detach.
    if (readBuffer == null)
      writeBuffer.close();
    else
      readBuffer.closeStorage(writeBuffer);
    writeBuffer = null;
  }

  if (readBuffer != null) {
    readBuffer.clear();
    readBuffer = null;
  }

  if (writeAheadLog != null) {
    writeAheadLog.delete();
    writeAheadLog = null;
  }

  String storageDir = storageLocal.getConfiguration().getDirectory();
  deleteIfExists(new File(storageDir + File.separator + "readWriteDiskCacheTest.tst"));
  deleteIfExists(new File(storageDir + File.separator + "name_id_map.cm"));
}

// Deletes the file when present, asserting the deletion succeeded.
private void deleteIfExists(File file) {
  if (file.exists()) {
    Assert.assertTrue(file.delete());
  }
}
/**
 * Final teardown: deletes the caches, the WAL, the backing storage and any
 * leftover test file (plus its now-empty directory).
 */
@AfterClass
public void afterClass() throws IOException {
  if (writeBuffer != null) {
    // Delete through the read cache when present so both layers clean up.
    if (readBuffer == null)
      writeBuffer.delete();
    else
      readBuffer.deleteStorage(writeBuffer);
    writeBuffer = null;
  }

  if (readBuffer != null) {
    readBuffer.clear();
    readBuffer = null;
  }

  if (writeAheadLog != null) {
    writeAheadLog.delete();
    writeAheadLog = null;
  }

  storageLocal.delete();

  File testFile = new File(storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst");
  if (testFile.exists()) {
    Assert.assertTrue(testFile.delete());
    // Best-effort removal of the (now empty) storage directory.
    testFile.getParentFile().delete();
  }
}
// Builds the cache pair used by most tests. Page size is 8 payload bytes plus the
// system header (systemOffset); each on-memory page also carries 2*PAGE_PADDING.
// The O2QCache is sized to hold exactly 4 pages, which the A1in/A1out/Am queue
// assertions in the tests below rely on. writeAheadLog is null here (no WAL).
private void initBuffer() throws IOException {
writeBuffer = new OWOWCache(false, 8 + systemOffset, -1, writeAheadLog, -1,
15000 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), 15000 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING) + 4
* (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), storageLocal, false, 1);
readBuffer = new O2QCache(4 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), 8 + systemOffset, false);
}
// Writes four pages (the exact capacity of the 4-page read cache), then verifies
// all four sit in the A1in queue, that Am/A1out are empty, and that flushing
// persists the expected bytes with a zero LSN (setLsn is not called here).
public void testAddFourItems() throws IOException {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[4];
for (int i = 0; i < 4; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 0);
Assert.assertEquals(a1out.size(), 0);
for (int i = 0; i < 4; i++) {
OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(0,
0));
Assert.assertEquals(a1in.get(entry.getFileId(), entry.getPageIndex()), entry);
}
Assert.assertEquals(writeBuffer.getFilledUpTo(fileId), 4);
writeBuffer.flush();
for (int i = 0; i < 4; i++) {
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(0, 0));
}
}
// 2Q promotion on re-read: pages 2-3 are loaded a second time after being evicted
// to A1out, so they must be promoted into Am; pages 6-7 (most recent first loads)
// stay in A1in, and pages 4-5 end up as A1out placeholders.
public void testFrequentlyReadItemsAreMovedInAm() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[10];
for (int i = 0; i < 10; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
setLsn(entries[i].getCachePointer().getDataPointer(), new OLogSequenceNumber(1, i));
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
// Persist everything and drop the read cache so the reloads below are real reads.
writeBuffer.flush();
readBuffer.clear();
for (int i = 0; i < 10; i++)
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(1, i));
for (int i = 0; i < 8; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
readBuffer.release(entries[i], writeBuffer);
}
// Second read of pages 2-3 triggers their promotion from A1out into Am.
for (int i = 2; i < 4; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
readBuffer.release(entries[i], writeBuffer);
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 2);
Assert.assertEquals(a1in.size(), 2);
Assert.assertEquals(a1out.size(), 2);
for (int i = 2; i < 4; i++) {
OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(
1, i));
Assert.assertEquals(am.get(fileId, i), lruEntry);
}
for (int i = 4; i < 6; i++) {
OCacheEntry lruEntry = generateRemovedEntry(fileId, i);
Assert.assertEquals(a1out.get(fileId, i), lruEntry);
}
for (int i = 6; i < 8; i++) {
OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(
1, i));
Assert.assertEquals(a1in.get(fileId, i), lruEntry);
}
}
// Registering a file with the cache must create it on disk immediately.
public void testCacheShouldCreateFileIfItIsNotExisted() throws Exception {
readBuffer.addFile(fileName, writeBuffer);

final File dataFile = new File(storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst");
Assert.assertTrue(dataFile.exists());
Assert.assertTrue(dataFile.isFile());
}
// 2Q promotion without an intermediate clear(): after 10 sequential page writes,
// the 4-page cache holds pages 6-9 in A1in and placeholders for 4-5 in A1out.
// Re-loading pages 4-5 must promote them into Am, evicting 6-7 to A1out.
public void testFrequentlyAddItemsAreMovedInAm() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[10];
for (int i = 0; i < 10; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
setLsn(entries[i].getCachePointer().getDataPointer(), new OLogSequenceNumber(1, i));
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(a1in.size(), 4);
Assert.assertEquals(a1out.size(), 2);
Assert.assertEquals(am.size(), 0);
for (int i = 6; i < 10; i++) {
OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(
0, 0));
Assert.assertEquals(a1in.get(fileId, i), lruEntry);
}
for (int i = 4; i < 6; i++) {
OCacheEntry lruEntry = generateRemovedEntry(fileId, i);
Assert.assertEquals(a1out.get(fileId, i), lruEntry);
}
// Touch pages that currently live in A1out; this is the promotion path under test.
for (int i = 4; i < 6; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
readBuffer.release(entries[i], writeBuffer);
}
Assert.assertEquals(am.size(), 2);
Assert.assertEquals(a1in.size(), 2);
Assert.assertEquals(a1out.size(), 2);
for (int i = 4; i < 6; i++) {
OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(
1, i));
Assert.assertEquals(am.get(fileId, i), lruEntry);
}
for (int i = 6; i < 8; i++) {
OCacheEntry lruEntry = generateRemovedEntry(fileId, i);
Assert.assertEquals(a1out.get(fileId, i), lruEntry);
}
for (int i = 8; i < 10; i++) {
OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(
0, 0));
Assert.assertEquals(a1in.get(fileId, i), lruEntry);
}
writeBuffer.flush();
for (int i = 0; i < 10; i++)
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(1, i));
}
// Round-trip test: write four pages, flush to disk, clear the read cache, then
// read them back and verify they land in A1in with the LSNs read from disk.
public void testReadFourItems() throws IOException {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[4];
for (int i = 0; i < 4; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
setLsn(entries[i].getCachePointer().getDataPointer(), new OLogSequenceNumber(1, i));
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
// Drop cached pages so the subsequent loads must come from the file.
readBuffer.clear();
writeBuffer.flush();
for (int i = 0; i < 4; i++) {
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(1, i));
}
for (int i = 0; i < 4; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
readBuffer.release(entries[i], writeBuffer);
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 0);
Assert.assertEquals(a1out.size(), 0);
for (int i = 0; i < 4; i++) {
OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(1,
i));
Assert.assertEquals(a1in.get(entry.getFileId(), entry.getPageIndex()), entry);
}
Assert.assertEquals(writeBuffer.getFilledUpTo(fileId), 4);
}
// A single load/allocate followed by release must leave exactly one entry in
// A1in (and nothing in Am/A1out), i.e. the page stays cached after release.
public void testLoadAndLockForReadShouldHitCache() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry cacheEntry = readBuffer.load(fileId, 0, false, writeBuffer, 0);
if (cacheEntry == null) {
cacheEntry = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(cacheEntry.getPageIndex(), 0);
}
readBuffer.release(cacheEntry, writeBuffer);
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 0);
Assert.assertEquals(a1out.size(), 0);
OCacheEntry entry = generateEntry(fileId, 0, cacheEntry.getCachePointer().getDataPointer(), false, new OLogSequenceNumber(0, 0));
Assert.assertEquals(a1in.size(), 1);
Assert.assertEquals(a1in.get(entry.getFileId(), entry.getPageIndex()), entry);
}
// Closing a file through the read cache (flush = true) must push all dirty pages
// to disk; the on-disk contents are verified without an explicit flush() call.
public void testCloseFileShouldFlushData() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[4];
for (int i = 0; i < 4; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 0);
Assert.assertEquals(a1out.size(), 0);
for (int i = 0; i < 4; i++) {
OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(0,
0));
Assert.assertEquals(a1in.get(entry.getFileId(), entry.getPageIndex()), entry);
}
Assert.assertEquals(writeBuffer.getFilledUpTo(fileId), 4);
// closeFile with flush=true is the operation under test.
readBuffer.closeFile(fileId, true, writeBuffer);
for (int i = 0; i < 4; i++) {
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(0, 0));
}
}
// Deleting a file through the read cache must remove it from disk even after a
// subsequent write-cache flush.
public void testDeleteFileShouldDeleteFileFromHardDrive() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[4];
byte[][] content = new byte[4][];
// Touch four pages (read their payload) so the file has cached state before deletion.
for (int i = 0; i < 4; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
content[i] = entries[i].getCachePointer().getDataPointer().get(systemOffset + OWOWCache.PAGE_PADDING, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
readBuffer.deleteFile(fileId, writeBuffer);
writeBuffer.flush();
// The original looped four times re-checking the same path; a single check suffices.
File file = new File(storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst");
Assert.assertFalse(file.exists());
}
// Each page is overwritten four times (inner j loop); after flush(fileId) only
// the last version (j == 3 in the 7th byte) must be on disk.
public void testFlushData() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[4];
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; ++j) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, (byte) j, (byte) i }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 0);
Assert.assertEquals(a1out.size(), 0);
for (int i = 0; i < 4; i++) {
OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(0,
0));
Assert.assertEquals(a1in.get(entry.getFileId(), entry.getPageIndex()), entry);
}
Assert.assertEquals(writeBuffer.getFilledUpTo(fileId), 4);
// Per-file flush overload is the operation under test here.
writeBuffer.flush(fileId);
for (int i = 0; i < 4; i++) {
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 3, (byte) i }, new OLogSequenceNumber(0, 0));
}
}
// Writing 6 pages into a 4-page cache must evict the two oldest (0 and 1) to
// A1out as pointer-less placeholders while pages 2-5 remain live in A1in.
public void testIfNotEnoughSpaceOldPagesShouldBeMovedToA1Out() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[6];
for (int i = 0; i < 6; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
LRUList am = readBuffer.getAm();
LRUList a1in = readBuffer.getA1in();
LRUList a1out = readBuffer.getA1out();
Assert.assertEquals(am.size(), 0);
for (int i = 0; i < 2; i++) {
OCacheEntry entry = generateRemovedEntry(fileId, i);
Assert.assertEquals(a1out.get(entry.getFileId(), entry.getPageIndex()), entry);
}
for (int i = 2; i < 6; i++) {
OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(0,
0));
Assert.assertEquals(a1in.get(entry.getFileId(), entry.getPageIndex()), entry);
}
Assert.assertEquals(writeBuffer.getFilledUpTo(fileId), 6);
writeBuffer.flush();
// Evicted pages must still have been written to disk correctly.
for (int i = 0; i < 6; i++) {
assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, new OLogSequenceNumber(0, 0));
}
}
@Test(expectedExceptions = OAllCacheEntriesAreUsedException.class)
// Pins 5 pages (locks acquired, never released inside the loop) in a 4-page
// cache; the 5th acquisition must raise OAllCacheEntriesAreUsedException.
public void testIfAllPagesAreUsedExceptionShouldBeThrown() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[5];
try {
for (int i = 0; i < 5; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
// Re-load an already-pinned older page to keep cache pressure up.
if (i - 4 >= 0) {
readBuffer.load(fileId, i - 4, false, writeBuffer, 0);
entries[i - 4].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) (i - 4), 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
}
}
} finally {
// Unpin the first four entries so later tests are not affected.
for (int i = 0; i < 4; i++) {
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
}
}
// Baseline for the corruption tests: on untouched pages checkStoredPages must
// report zero verification errors.
public void testDataVerificationOK() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[6];
for (int i = 0; i < 6; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
Assert.assertTrue(writeBuffer.checkStoredPages(null).length == 0);
}
// Corrupts the magic number (first bytes of the page header) of pages 2 and 4
// directly on disk; checkStoredPages must flag exactly those pages with
// incorrectMagicNumber set and incorrectCheckSum clear.
public void testMagicNumberIsBroken() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[6];
for (int i = 0; i < 6; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
writeBuffer.flush();
// Write an arbitrary wrong value (23) where the magic number lives (offset 0).
byte[] brokenMagicNumber = new byte[OIntegerSerializer.INT_SIZE];
OIntegerSerializer.INSTANCE.serializeNative(23, brokenMagicNumber, 0);
updateFilePage(2, 0, brokenMagicNumber);
updateFilePage(4, 0, brokenMagicNumber);
OPageDataVerificationError[] pageErrors = writeBuffer.checkStoredPages(null);
Assert.assertEquals(2, pageErrors.length);
Assert.assertTrue(pageErrors[0].incorrectMagicNumber);
Assert.assertFalse(pageErrors[0].incorrectCheckSum);
Assert.assertEquals(2, pageErrors[0].pageIndex);
Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[0].fileName);
Assert.assertTrue(pageErrors[1].incorrectMagicNumber);
Assert.assertFalse(pageErrors[1].incorrectCheckSum);
Assert.assertEquals(4, pageErrors[1].pageIndex);
Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[1].fileName);
}
// Corrupts one payload byte (past the system header, so the magic number stays
// intact) in pages 2 and 4; checkStoredPages must flag exactly those pages with
// incorrectCheckSum set and incorrectMagicNumber clear.
public void testCheckSumIsBroken() throws Exception {
long fileId = readBuffer.addFile(fileName, writeBuffer);
OCacheEntry[] entries = new OCacheEntry[6];
for (int i = 0; i < 6; i++) {
entries[i] = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (entries[i] == null) {
entries[i] = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(entries[i].getPageIndex(), i);
}
entries[i].getCachePointer().acquireExclusiveLock();
entries[i].markDirty();
entries[i].getCachePointer().getDataPointer()
.set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
entries[i].getCachePointer().releaseExclusiveLock();
readBuffer.release(entries[i], writeBuffer);
}
writeBuffer.flush();
byte[] brokenByte = new byte[1];
brokenByte[0] = 13;
updateFilePage(2, systemOffset + 2, brokenByte);
updateFilePage(4, systemOffset + 4, brokenByte);
OPageDataVerificationError[] pageErrors = writeBuffer.checkStoredPages(null);
Assert.assertEquals(2, pageErrors.length);
Assert.assertFalse(pageErrors[0].incorrectMagicNumber);
Assert.assertTrue(pageErrors[0].incorrectCheckSum);
Assert.assertEquals(2, pageErrors[0].pageIndex);
Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[0].fileName);
Assert.assertFalse(pageErrors[1].incorrectMagicNumber);
Assert.assertTrue(pageErrors[1].incorrectCheckSum);
Assert.assertEquals(4, pageErrors[1].pageIndex);
Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[1].fileName);
}
// Rebuilds the caches with a real WAL and a tiny (2-page) write cache, logs one
// WAL record per dirty page, and checks the WAL's flushed LSN eventually reaches
// the last page LSN.
public void testFlushTillLSN() throws Exception {
closeBufferAndDeleteFile();
File file = new File(storageLocal.getConfiguration().getDirectory());
if (!file.exists())
file.mkdir();
writeAheadLog = new ODiskWriteAheadLog(1024, -1, 10 * 1024, null, storageLocal);
final OStorageSegmentConfiguration segmentConfiguration = new OStorageSegmentConfiguration(storageLocal.getConfiguration(),
"readWriteDiskCacheTest.tst", 0);
segmentConfiguration.fileType = OFileClassic.NAME;
writeBuffer = new OWOWCache(false, 8 + systemOffset, 10000, writeAheadLog, 100,
2 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), 2 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING) + 4
* (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), storageLocal, false, 10);
readBuffer = new O2QCache(4 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), 8 + systemOffset, false);
long fileId = readBuffer.addFile(fileName, writeBuffer);
OLogSequenceNumber lsnToFlush = null;
for (int i = 0; i < 8; i++) {
OCacheEntry cacheEntry = readBuffer.load(fileId, i, false, writeBuffer, 0);
if (cacheEntry == null) {
cacheEntry = readBuffer.allocateNewPage(fileId, writeBuffer);
Assert.assertEquals(cacheEntry.getPageIndex(), i);
}
OCachePointer dataPointer = cacheEntry.getCachePointer();
dataPointer.acquireExclusiveLock();
// Stamp each page with the LSN of a freshly logged WAL record.
OLogSequenceNumber pageLSN = writeAheadLog.log(new WriteAheadLogTest.TestRecord(30, false));
setLsn(dataPointer.getDataPointer(), pageLSN);
lsnToFlush = pageLSN;
cacheEntry.markDirty();
dataPointer.releaseExclusiveLock();
readBuffer.release(cacheEntry, writeBuffer);
}
// NOTE(review): relies on the background flush completing within one second —
// potentially flaky on slow machines; a polling wait would be more robust.
Thread.sleep(1000);
Assert.assertEquals(writeAheadLog.getFlushedLsn(), lsnToFlush);
}
/**
 * Overwrites {@code value.length} bytes of the on-disk test file at the given
 * page index/offset, bypassing both caches, and forces the change to disk.
 * Used to corrupt pages for the magic-number/checksum verification tests.
 */
private void updateFilePage(long pageIndex, long offset, byte[] value) throws IOException {
String path = storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst";
OFileClassic fileClassic = new OFileClassic(path, "rw");
fileClassic.open();
try {
fileClassic.write(pageIndex * (8 + systemOffset) + offset, value, value.length, 0);
fileClassic.synch();
}
finally {
// Close even if the write/sync fails so the handle is not leaked across tests
// (the original skipped close() on exception).
fileClassic.close();
}
}
/**
 * Reads one page of the test file directly from disk and verifies its payload
 * bytes, magic number, CRC32 checksum, and stored log sequence number.
 */
private void assertFile(long pageIndex, byte[] value, OLogSequenceNumber lsn) throws IOException {
String path = storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst";
OFileClassic fileClassic = new OFileClassic(path, "r");
fileClassic.open();
try {
byte[] content = new byte[8 + systemOffset];
fileClassic.read(pageIndex * (8 + systemOffset), content, 8 + systemOffset);
// Payload occupies the bytes after the system header.
Assert.assertEquals(Arrays.copyOfRange(content, systemOffset, 8 + systemOffset), value);
long magicNumber = OLongSerializer.INSTANCE.deserializeNative(content, 0);
Assert.assertEquals(magicNumber, OWOWCache.MAGIC_NUMBER);
// CRC covers everything past the magic-number and checksum fields.
CRC32 crc32 = new CRC32();
crc32.update(content, OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE, content.length - OIntegerSerializer.INT_SIZE
- OLongSerializer.LONG_SIZE);
int crc = OIntegerSerializer.INSTANCE.deserializeNative(content, OLongSerializer.LONG_SIZE);
Assert.assertEquals(crc, (int) crc32.getValue());
long segment = OLongSerializer.INSTANCE.deserializeNative(content, ODurablePage.WAL_SEGMENT_OFFSET);
long position = OLongSerializer.INSTANCE.deserializeNative(content, ODurablePage.WAL_POSITION_OFFSET);
OLogSequenceNumber readLsn = new OLogSequenceNumber(segment, position);
Assert.assertEquals(readLsn, lsn);
}
finally {
// Close even when an assertion fails so the handle is not leaked across tests
// (the original skipped close() on any failure above).
fileClassic.close();
}
}
/**
 * Builds the cache entry the 2Q cache is expected to hold for a live page:
 * same identity, backed by a cache pointer around {@code pointer} with the given LSN.
 */
private OCacheEntry generateEntry(long fileId, long pageIndex, ODirectMemoryPointer pointer, boolean dirty, OLogSequenceNumber lsn) {
final OCachePointer cachePointer = new OCachePointer(pointer, lsn, fileId, pageIndex);
return new OCacheEntry(fileId, pageIndex, cachePointer, dirty);
}
/**
 * Builds the placeholder entry expected in the A1out queue for an evicted page:
 * same identity as the page but with no backing cache pointer and not dirty.
 */
private OCacheEntry generateRemovedEntry(long fileId, long pageIndex) {
final OCachePointer noPointer = null;
return new OCacheEntry(fileId, pageIndex, noPointer, false);
}
// Writes the LSN (segment, then position) into the page header in direct memory,
// at the same offsets assertFile() later reads back from disk.
private void setLsn(ODirectMemoryPointer dataPointer, OLogSequenceNumber lsn) {
OLongSerializer.INSTANCE.serializeInDirectMemory(lsn.getSegment(), dataPointer, OIntegerSerializer.INT_SIZE
+ OLongSerializer.LONG_SIZE + OWOWCache.PAGE_PADDING);
OLongSerializer.INSTANCE.serializeInDirectMemory(lsn.getPosition(), dataPointer, OIntegerSerializer.INT_SIZE + 2
* OLongSerializer.LONG_SIZE + OWOWCache.PAGE_PADDING);
}
}
|
|
package io.airlift.http.client;
import com.google.common.collect.ImmutableList;
import io.airlift.http.client.HttpClient.HttpResponseFuture;
import io.airlift.http.client.StatusResponseHandler.StatusResponse;
import io.airlift.http.client.StringResponseHandler.StringResponse;
import io.airlift.http.client.jetty.JettyHttpClient;
import io.airlift.log.Logging;
import io.airlift.testing.Closeables;
import io.airlift.units.Duration;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.ConnectException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.UnresolvedAddressException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import static com.google.common.base.Throwables.getStackTraceAsString;
import static com.google.common.base.Throwables.propagateIfPossible;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.google.common.net.HttpHeaders.ACCEPT_ENCODING;
import static com.google.common.net.HttpHeaders.AUTHORIZATION;
import static com.google.common.net.HttpHeaders.CONTENT_LENGTH;
import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
import static com.google.common.net.HttpHeaders.LOCATION;
import static com.google.common.net.HttpHeaders.USER_AGENT;
import static io.airlift.concurrent.Threads.threadsNamed;
import static io.airlift.http.client.Request.Builder.prepareDelete;
import static io.airlift.http.client.Request.Builder.prepareGet;
import static io.airlift.http.client.Request.Builder.preparePost;
import static io.airlift.http.client.Request.Builder.preparePut;
import static io.airlift.http.client.StatusResponseHandler.createStatusResponseHandler;
import static io.airlift.http.client.StringResponseHandler.createStringResponseHandler;
import static io.airlift.testing.Assertions.assertBetweenInclusive;
import static io.airlift.testing.Assertions.assertGreaterThanOrEqual;
import static io.airlift.testing.Assertions.assertLessThan;
import static io.airlift.testing.Closeables.closeQuietly;
import static io.airlift.units.Duration.nanosSince;
import static java.lang.String.format;
import static java.lang.Thread.currentThread;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
@Test(singleThreaded = true)
public abstract class AbstractHttpClientTest
{
// Servlet that records the last request; assertions read it back after each call.
protected EchoServlet servlet;
// Server under test; abstractTeardown() closes it when non-null.
protected TestingHttpServer server;
// Base URI of the running server, rebuilt per test in abstractSetup().
protected URI baseURI;
// Defaults target plain HTTP on loopback; the (host, keystore) constructor
// switches to HTTPS.
private String scheme = "http";
private String host = "127.0.0.1";
protected String keystore;
// Default constructor: plain HTTP against 127.0.0.1.
protected AbstractHttpClientTest()
{
}
// HTTPS variant: subclasses supply the host and the keystore path for TLS.
protected AbstractHttpClientTest(String host, String keystore)
{
scheme = "https";
this.host = host;
this.keystore = keystore;
}
// Subclasses supply the client configuration each test starts from.
protected abstract HttpClientConfig createClientConfig();
// Executes the request with the subclass's client implementation using a default config.
public abstract <T, E extends Exception> T executeRequest(Request request, ResponseHandler<T, E> responseHandler)
throws Exception;
// Same as above but with an explicit, possibly customized, client config.
public abstract <T, E extends Exception> T executeRequest(HttpClientConfig config, Request request, ResponseHandler<T, E> responseHandler)
throws Exception;
@BeforeSuite
public void setupSuite()
{
// Initialize logging once for the whole suite.
Logging.initialize();
}
@BeforeMethod
public void abstractSetup()
throws Exception
{
// Fresh servlet and server per test method.
servlet = new EchoServlet();
// Assign the field, not a local: the original declared a local
// "TestingHttpServer server" that shadowed the field, leaving this.server null,
// so abstractTeardown() never closed it and a server leaked after every test.
server = new TestingHttpServer(Optional.ofNullable(keystore), servlet);
baseURI = new URI(scheme, null, server.getHostAndPort().getHost(), server.getHostAndPort().getPort(), null, null, null);
}
@AfterMethod(alwaysRun = true)
public void abstractTeardown()
throws Exception
{
// Shut down the per-test server if one was started.
if (server != null) {
server.close();
}
}
// Soak test, disabled by default (takes over a minute): 100k sequential GETs
// against the echo servlet, wrapping any failure with the iteration number.
@Test(enabled = false, description = "This takes over a minute to run")
public void test100kGets()
throws Exception
{
URI uri = baseURI.resolve("/road/to/nowhere?query");
Request request = prepareGet()
.setUri(uri)
.addHeader("foo", "bar")
.addHeader("dupe", "first")
.addHeader("dupe", "second")
.build();
for (int i = 0; i < 100_000; i++) {
try {
int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
assertEquals(statusCode, 200);
}
catch (Exception e) {
// Include the iteration so intermittent failures are locatable.
throw new Exception("Error on request " + i, e);
}
}
}
// Connecting to a server whose accept queue is full (BackloggedServer) with a
// 5 ms connect timeout must fail quickly with a connect-timeout-like exception.
@Test(timeOut = 5000)
public void testConnectTimeout()
throws Exception
{
try (BackloggedServer server = new BackloggedServer()) {
HttpClientConfig config = createClientConfig();
config.setConnectTimeout(new Duration(5, MILLISECONDS));
config.setIdleTimeout(new Duration(2, SECONDS));
Request request = prepareGet()
.setUri(new URI(scheme, null, host, server.getPort(), "/", null, null))
.build();
long start = System.nanoTime();
try {
executeRequest(config, request, new CaptureExceptionResponseHandler());
fail("expected exception");
}
catch (CapturedException e) {
Throwable t = e.getCause();
// Either a connect timeout or a closed channel is acceptable here.
if (!(isConnectTimeout(t) || t instanceof ClosedChannelException)) {
fail(format("unexpected exception: [%s]", getStackTraceAsString(t)));
}
// The failure must arrive well before the 2 s idle timeout.
assertLessThan(nanosSince(start), new Duration(300, MILLISECONDS));
}
}
}
// A request to a port with no listener must surface a ConnectException.
@Test(expectedExceptions = ConnectException.class)
public void testConnectionRefused()
throws Exception
{
int unusedPort = findUnusedPort();

HttpClientConfig config = createClientConfig();
config.setConnectTimeout(new Duration(5, SECONDS));

URI target = new URI(scheme, null, host, unusedPort, "/", null, null);
Request request = prepareGet().setUri(target).build();
executeExceptionRequest(config, request);
}
// When the handler maps exceptions to a default value, a refused connection must
// yield that default rather than propagate the exception.
@Test
public void testConnectionRefusedWithDefaultingResponseExceptionHandler()
throws Exception
{
int port = findUnusedPort();
HttpClientConfig config = createClientConfig();
config.setConnectTimeout(new Duration(5, MILLISECONDS));
Request request = prepareGet()
.setUri(new URI(scheme, null, host, port, "/", null, null))
.build();
Object expected = new Object();
// Identity comparison: the exact sentinel object must come back.
assertEquals(executeRequest(config, request, new DefaultOnExceptionResponseHandler(expected)), expected);
}
// A hostname that cannot resolve must fail with UnknownHostException or
// UnresolvedAddressException (implementation-dependent).
@Test(expectedExceptions = {UnknownHostException.class, UnresolvedAddressException.class}, timeOut = 10000)
public void testUnresolvableHost()
throws Exception
{
// ".invalid" is reserved and guaranteed not to resolve; assertUnknownHost
// double-checks the environment before running the client.
String invalidHost = "nonexistent.invalid";
assertUnknownHost(invalidHost);
HttpClientConfig config = createClientConfig();
config.setConnectTimeout(new Duration(5, SECONDS));
Request request = prepareGet()
.setUri(URI.create("http://" + invalidHost))
.build();
executeExceptionRequest(config, request);
}
// Port 70000 exceeds the valid 0-65535 range and must be rejected with a
// RuntimeException whose message mentions "port out of range".
@Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = ".*port out of range.*")
public void testBadPort()
throws Exception
{
HttpClientConfig config = createClientConfig();
Request request = prepareGet()
.setUri(new URI(scheme, null, host, 70_000, "/", null, null))
.build();
executeExceptionRequest(config, request);
}
// DELETE round-trip: the echo servlet must record the method, URI, the single
// and duplicated custom headers, and the header injected by the client filter.
@Test
public void testDeleteMethod()
throws Exception
{
URI uri = baseURI.resolve("/road/to/nowhere");
Request request = prepareDelete()
.setUri(uri)
.addHeader("foo", "bar")
.addHeader("dupe", "first")
.addHeader("dupe", "second")
.build();
int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
assertEquals(statusCode, 200);
assertEquals(servlet.getRequestMethod(), "DELETE");
assertEquals(servlet.getRequestUri(), uri);
assertEquals(servlet.getRequestHeaders("foo"), ImmutableList.of("bar"));
// Duplicate headers must arrive in order, not be collapsed.
assertEquals(servlet.getRequestHeaders("dupe"), ImmutableList.of("first", "second"));
// Added by the test client's request filter, not by this request builder.
assertEquals(servlet.getRequestHeaders("x-custom-filter"), ImmutableList.of("custom value"));
}
// Even on a 500 response, the string handler must surface the server-sent body.
@Test
public void testErrorResponseBody()
throws Exception
{
servlet.setResponseStatusCode(500);
servlet.setResponseBody("body text");

Request request = prepareGet().setUri(baseURI).build();
StringResponse response = executeRequest(request, createStringResponseHandler());

assertEquals(response.getStatusCode(), 500);
assertEquals(response.getBody(), "body text");
}
@Test
public void testGetMethod()
throws Exception
{
URI uri = baseURI.resolve("/road/to/nowhere?query");
Request request = prepareGet()
.setUri(uri)
.addHeader("foo", "bar")
.addHeader("dupe", "first")
.addHeader("dupe", "second")
.build();
int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
assertEquals(statusCode, 200);
assertEquals(servlet.getRequestMethod(), "GET");
if (servlet.getRequestUri().toString().endsWith("=")) {
// todo jetty client rewrites the uri string for some reason
assertEquals(servlet.getRequestUri(), new URI(uri + "="));
}
else {
assertEquals(servlet.getRequestUri(), uri);
}
assertEquals(servlet.getRequestHeaders("foo"), ImmutableList.of("bar"));
assertEquals(servlet.getRequestHeaders("dupe"), ImmutableList.of("first", "second"));
assertEquals(servlet.getRequestHeaders("x-custom-filter"), ImmutableList.of("custom value"));
}
// Verifies that response header lookup is case-insensitive and that the
// same header is not double-counted under different casings.
@Test
public void testResponseHeadersCaseInsensitive()
        throws Exception
{
    URI uri = baseURI.resolve("/road/to/nowhere");
    Request request = prepareGet()
            .setUri(uri)
            .build();
    Response response = executeRequest(request, new PassThroughResponseHandler());
    assertNotNull(response.getHeader("date"));
    assertNotNull(response.getHeader("DATE"));
    assertEquals(response.getHeaders("date").size(), 1);
    assertEquals(response.getHeaders("DATE").size(), 1);
}

// Verifies that a percent-encoded space in the query string is transmitted
// without being re-encoded or decoded along the way.
@Test
public void testQuotedSpace()
        throws Exception
{
    URI uri = baseURI.resolve("/road/to/nowhere?query=ab%20cd");
    Request request = prepareGet()
            .setUri(uri)
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 200);
    assertEquals(servlet.getRequestMethod(), "GET");
    assertEquals(servlet.getRequestUri(), uri);
}
// Verifies HTTP keep-alive: three sequential requests (with idle pauses
// shorter than the idle timeout) must reuse the same connection, observed
// via the client's remote port echoed back by the servlet.
@Test
public void testKeepAlive()
        throws Exception
{
    // "?remotePort=" asks the test servlet to echo the client port in a header
    URI uri = URI.create(baseURI.toASCIIString() + "/?remotePort=");
    Request request = prepareGet()
            .setUri(uri)
            .build();
    StatusResponse response1 = executeRequest(request, createStatusResponseHandler());
    // sleep between requests to prove the pooled connection survives idling
    Thread.sleep(1000);
    StatusResponse response2 = executeRequest(request, createStatusResponseHandler());
    Thread.sleep(1000);
    StatusResponse response3 = executeRequest(request, createStatusResponseHandler());
    assertNotNull(response1.getHeader("remotePort"));
    assertNotNull(response2.getHeader("remotePort"));
    assertNotNull(response3.getHeader("remotePort"));
    int port1 = Integer.parseInt(response1.getHeader("remotePort"));
    int port2 = Integer.parseInt(response2.getHeader("remotePort"));
    int port3 = Integer.parseInt(response3.getHeader("remotePort"));
    // same ephemeral port on all three requests => same TCP connection
    assertEquals(port2, port1);
    assertEquals(port3, port1);
    // sanity check: must be an ephemeral/user-range port
    assertBetweenInclusive(port1, 1024, 65535);
}
// Verifies a POST request: method, URI, duplicate headers, filter header.
@Test
public void testPostMethod()
        throws Exception
{
    URI uri = baseURI.resolve("/road/to/nowhere");
    Request request = preparePost()
            .setUri(uri)
            .addHeader("foo", "bar")
            .addHeader("dupe", "first")
            .addHeader("dupe", "second")
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 200);
    assertEquals(servlet.getRequestMethod(), "POST");
    assertEquals(servlet.getRequestUri(), uri);
    assertEquals(servlet.getRequestHeaders("foo"), ImmutableList.of("bar"));
    assertEquals(servlet.getRequestHeaders("dupe"), ImmutableList.of("first", "second"));
    assertEquals(servlet.getRequestHeaders("x-custom-filter"), ImmutableList.of("custom value"));
}

// Verifies a PUT request without a body: method, URI, headers.
@Test
public void testPutMethod()
        throws Exception
{
    URI uri = baseURI.resolve("/road/to/nowhere");
    Request request = preparePut()
            .setUri(uri)
            .addHeader("foo", "bar")
            .addHeader("dupe", "first")
            .addHeader("dupe", "second")
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 200);
    assertEquals(servlet.getRequestMethod(), "PUT");
    assertEquals(servlet.getRequestUri(), uri);
    assertEquals(servlet.getRequestHeaders("foo"), ImmutableList.of("bar"));
    assertEquals(servlet.getRequestHeaders("dupe"), ImmutableList.of("first", "second"));
    assertEquals(servlet.getRequestHeaders("x-custom-filter"), ImmutableList.of("custom value"));
}

// Verifies a PUT whose body comes from a pre-materialized byte array
// (StaticBodyGenerator); the servlet must receive the exact bytes.
@Test
public void testPutMethodWithStaticBodyGenerator()
        throws Exception
{
    URI uri = baseURI.resolve("/road/to/nowhere");
    byte[] body = {1, 2, 5};
    Request request = preparePut()
            .setUri(uri)
            .addHeader("foo", "bar")
            .addHeader("dupe", "first")
            .addHeader("dupe", "second")
            .setBodyGenerator(StaticBodyGenerator.createStaticBodyGenerator(body))
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 200);
    assertEquals(servlet.getRequestMethod(), "PUT");
    assertEquals(servlet.getRequestUri(), uri);
    assertEquals(servlet.getRequestHeaders("foo"), ImmutableList.of("bar"));
    assertEquals(servlet.getRequestHeaders("dupe"), ImmutableList.of("first", "second"));
    assertEquals(servlet.getRequestHeaders("x-custom-filter"), ImmutableList.of("custom value"));
    assertEquals(servlet.getRequestBytes(), body);
}
// Verifies a PUT whose body is produced by a streaming lambda body
// generator; the servlet must receive the bytes in write order.
@Test
public void testPutMethodWithDynamicBodyGenerator()
        throws Exception
{
    URI uri = baseURI.resolve("/road/to/nowhere");
    Request request = preparePut()
            .setUri(uri)
            .addHeader("foo", "bar")
            .addHeader("dupe", "first")
            .addHeader("dupe", "second")
            .setBodyGenerator(out -> {
                // mix single-byte and array writes to exercise both paths
                out.write(1);
                out.write(new byte[] {2, 5});
            })
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 200);
    assertEquals(servlet.getRequestMethod(), "PUT");
    assertEquals(servlet.getRequestUri(), uri);
    assertEquals(servlet.getRequestHeaders("foo"), ImmutableList.of("bar"));
    assertEquals(servlet.getRequestHeaders("dupe"), ImmutableList.of("first", "second"));
    assertEquals(servlet.getRequestHeaders("x-custom-filter"), ImmutableList.of("custom value"));
    assertEquals(servlet.getRequestBytes(), new byte[] {1, 2, 5});
}

// Verifies a PUT whose body is streamed from a temp file; the client must
// also derive and send the correct Content-Length.
// NOTE(review): the method name looks like an accidental concatenation of
// "testOverrideMethodFilterInHttpServer" and
// "testPutMethodWithFileBodyGenerator" — consider renaming (kept as-is,
// this is a documentation-only change).
@Test
public void testOverrideMethodFilterInHttpServertestPutMethodWithFileBodyGenerator()
        throws Exception
{
    byte[] contents = "hello world".getBytes(UTF_8);
    File testFile = File.createTempFile("test", null);
    Files.write(testFile.toPath(), contents);
    URI uri = baseURI.resolve("/road/to/nowhere");
    Request request = preparePut()
            .setUri(uri)
            .addHeader(CONTENT_TYPE, "x-test")
            .setBodyGenerator(new FileBodyGenerator(testFile.toPath()))
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 200);
    assertEquals(servlet.getRequestMethod(), "PUT");
    assertEquals(servlet.getRequestUri(), uri);
    assertEquals(servlet.getRequestHeaders(CONTENT_TYPE), ImmutableList.of("x-test"));
    // FileBodyGenerator knows the file size, so Content-Length must be exact
    assertEquals(servlet.getRequestHeaders(CONTENT_LENGTH), ImmutableList.of(String.valueOf(contents.length)));
    assertEquals(servlet.getRequestBytes(), contents);
    // clean up the temp file; failure to delete indicates a leaked handle
    assertTrue(testFile.delete());
}
// Verifies that a server that sleeps longer than the client's idle timeout
// causes a read-timeout failure (IOException or TimeoutException).
@Test(expectedExceptions = {IOException.class, TimeoutException.class})
public void testReadTimeout()
        throws Exception
{
    HttpClientConfig config = createClientConfig()
            .setIdleTimeout(new Duration(500, MILLISECONDS));
    // servlet sleeps 1000ms, which exceeds the 500ms idle timeout
    URI uri = URI.create(baseURI.toASCIIString() + "/?sleep=1000");
    Request request = prepareGet()
            .setUri(uri)
            .build();
    executeRequest(config, request, new ExceptionResponseHandler());
}

// Verifies that a 200 response body is delivered intact.
@Test
public void testResponseBody()
        throws Exception
{
    servlet.setResponseBody("body text");
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    StringResponse response = executeRequest(request, createStringResponseHandler());
    assertEquals(response.getStatusCode(), 200);
    assertEquals(response.getBody(), "body text");
}

// Verifies that an empty response body yields "" (never null).
@Test
public void testResponseBodyEmpty()
        throws Exception
{
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    String body = executeRequest(request, createStringResponseHandler()).getBody();
    assertEquals(body, "");
}

// Verifies that single and duplicate response headers round-trip in order.
@Test
public void testResponseHeader()
        throws Exception
{
    servlet.addResponseHeader("foo", "bar");
    servlet.addResponseHeader("dupe", "first");
    servlet.addResponseHeader("dupe", "second");
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    StatusResponse response = executeRequest(request, createStatusResponseHandler());
    assertEquals(response.getHeaders("foo"), ImmutableList.of("bar"));
    assertEquals(response.getHeaders("dupe"), ImmutableList.of("first", "second"));
}

// Verifies that an arbitrary (non-standard) status code is passed through.
@Test
public void testResponseStatusCode()
        throws Exception
{
    servlet.setResponseStatusCode(543);
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    int statusCode = executeRequest(request, createStatusResponseHandler()).getStatusCode();
    assertEquals(statusCode, 543);
}
// Verifies that repeated, set, and multiple Authorization request headers
// are transmitted exactly as configured.
@Test
public void testRequestHeaders()
        throws Exception
{
    String basic = "Basic dGVzdDphYmM=";
    String bearer = "Bearer testxyz";
    Request request = prepareGet()
            .setUri(baseURI)
            .addHeader("X-Test", "xtest1")
            .addHeader("X-Test", "xtest2")
            .setHeader(USER_AGENT, "testagent")
            .addHeader(AUTHORIZATION, basic)
            .addHeader(AUTHORIZATION, bearer)
            .build();
    StatusResponse response = executeRequest(request, createStatusResponseHandler());
    assertEquals(response.getStatusCode(), 200);
    assertThat(servlet.getRequestHeaders("X-Test")).containsExactly("xtest1", "xtest2");
    assertThat(servlet.getRequestHeaders(USER_AGENT)).containsExactly("testagent");
    assertThat(servlet.getRequestHeaders(AUTHORIZATION)).containsExactly(basic, bearer);
}

// Verifies redirect header handling: by default Authorization headers are
// stripped on redirect (credential-leak prevention); with
// setPreserveAuthorizationOnRedirect(true) they are forwarded.
@Test
public void testRedirectRequestHeaders()
        throws Exception
{
    String basic = "Basic dGVzdDphYmM=";
    String bearer = "Bearer testxyz";
    Request request = prepareGet()
            .setUri(URI.create(baseURI.toASCIIString() + "/?redirect=/redirect"))
            .addHeader("X-Test", "xtest1")
            .addHeader("X-Test", "xtest2")
            .setHeader(USER_AGENT, "testagent")
            .addHeader(AUTHORIZATION, basic)
            .addHeader(AUTHORIZATION, bearer)
            .build();
    StatusResponse response = executeRequest(request, createStatusResponseHandler());
    assertEquals(response.getStatusCode(), 200);
    assertEquals(servlet.getRequestUri(), URI.create(baseURI.toASCIIString() + "/redirect"));
    assertThat(servlet.getRequestHeaders("X-Test")).containsExactly("xtest1", "xtest2");
    assertThat(servlet.getRequestHeaders(USER_AGENT)).containsExactly("testagent");
    // default behavior: credentials must NOT follow the redirect
    assertThat(servlet.getRequestHeaders(AUTHORIZATION)).isEmpty();
    request = Request.Builder.fromRequest(request)
            .setPreserveAuthorizationOnRedirect(true)
            .build();
    response = executeRequest(request, createStatusResponseHandler());
    assertEquals(response.getStatusCode(), 200);
    assertEquals(servlet.getRequestUri(), URI.create(baseURI.toASCIIString() + "/redirect"));
    assertThat(servlet.getRequestHeaders("X-Test")).containsExactly("xtest1", "xtest2");
    assertThat(servlet.getRequestHeaders(USER_AGENT)).containsExactly("testagent");
    // opt-in: credentials follow the redirect
    assertThat(servlet.getRequestHeaders(AUTHORIZATION)).containsExactly(basic, bearer);
}

// Verifies redirect following: enabled by default (final target reached, no
// Location header surfaces), and a raw 302 + Location when disabled.
@Test
public void testFollowRedirects()
        throws Exception
{
    Request request = prepareGet()
            .setUri(URI.create(baseURI.toASCIIString() + "/test?redirect=/redirect"))
            .build();
    StatusResponse response = executeRequest(request, createStatusResponseHandler());
    assertEquals(response.getStatusCode(), 200);
    assertNull(response.getHeader(LOCATION));
    assertEquals(servlet.getRequestUri(), URI.create(baseURI.toASCIIString() + "/redirect"));
    request = Request.Builder.fromRequest(request)
            .setFollowRedirects(false)
            .build();
    response = executeRequest(request, createStatusResponseHandler());
    assertEquals(response.getStatusCode(), 302);
    assertEquals(response.getHeader(LOCATION), baseURI.toASCIIString() + "/redirect");
    assertEquals(servlet.getRequestUri(), request.getUri());
}
// Verifies that a handler expecting a specific status code throws
// UnexpectedResponseException when the server returns something else.
@Test(expectedExceptions = UnexpectedResponseException.class)
public void testThrowsUnexpectedResponseException()
        throws Exception
{
    servlet.setResponseStatusCode(543);
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    executeRequest(request, new UnexpectedResponseStatusCodeHandler(200));
}

// Verifies that the client does not advertise Accept-Encoding and that a
// gzip-eligible JSON response is delivered uncompressed.
@Test
public void testCompressionIsDisabled()
        throws Exception
{
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    String body = executeRequest(request, createStringResponseHandler()).getBody();
    assertEquals(body, "");
    // client must not request compression at all
    assertFalse(servlet.getRequestHeaders().containsKey(HeaderName.of(ACCEPT_ENCODING)));
    String json = "{\"fuite\":\"apple\",\"hello\":\"world\"}";
    // body must be large enough that Jetty WOULD gzip it if allowed
    assertGreaterThanOrEqual(json.length(), GzipHandler.DEFAULT_MIN_GZIP_SIZE);
    servlet.setResponseBody(json);
    servlet.addResponseHeader(CONTENT_TYPE, "application/json");
    StringResponse response = executeRequest(request, createStringResponseHandler());
    assertEquals(response.getHeader(CONTENT_TYPE), "application/json");
    assertEquals(response.getBody(), json);
}

// Executor used to run FakeServer instances off the test thread.
private ExecutorService executor;

@BeforeClass
public final void setUp()
        throws Exception
{
    executor = Executors.newCachedThreadPool(threadsNamed("test-%s"));
}

@AfterClass(alwaysRun = true)
public final void tearDown()
        throws Exception
{
    if (executor != null) {
        executor.shutdownNow();
    }
}
// FakeServer scenarios below: each accepts a connection and then misbehaves
// in a specific way; the client must fail fast with IOException or
// TimeoutException rather than hang (see executeRequest(FakeServer, ...)).

// Server accepts but never reads: client must hit its idle timeout.
@Test(expectedExceptions = {IOException.class, TimeoutException.class})
public void testConnectNoRead()
        throws Exception
{
    try (FakeServer fakeServer = new FakeServer(scheme, host, 0, null, false)) {
        HttpClientConfig config = createClientConfig();
        config.setConnectTimeout(new Duration(5, SECONDS));
        config.setIdleTimeout(new Duration(10, MILLISECONDS));
        executeRequest(fakeServer, config);
    }
}

// Server accepts and immediately closes without reading: client must see IOException.
@Test(expectedExceptions = IOException.class)
public void testConnectNoReadClose()
        throws Exception
{
    try (FakeServer fakeServer = new FakeServer(scheme, host, 0, null, true)) {
        HttpClientConfig config = createClientConfig();
        config.setConnectTimeout(new Duration(5, SECONDS));
        config.setIdleTimeout(new Duration(5, SECONDS));
        executeRequest(fakeServer, config);
    }
}

// Server reads only part of the request then stalls: idle timeout applies.
@Test(expectedExceptions = {IOException.class, TimeoutException.class})
public void testConnectReadIncomplete()
        throws Exception
{
    try (FakeServer fakeServer = new FakeServer(scheme, host, 10, null, false)) {
        HttpClientConfig config = createClientConfig();
        config.setConnectTimeout(new Duration(5, SECONDS));
        config.setIdleTimeout(new Duration(10, MILLISECONDS));
        executeRequest(fakeServer, config);
    }
}

// Server reads part of the request then closes the connection.
@Test(expectedExceptions = {IOException.class, TimeoutException.class})
public void testConnectReadIncompleteClose()
        throws Exception
{
    try (FakeServer fakeServer = new FakeServer(scheme, host, 10, null, true)) {
        HttpClientConfig config = createClientConfig();
        config.setConnectTimeout(new Duration(500, MILLISECONDS));
        config.setIdleTimeout(new Duration(500, MILLISECONDS));
        executeRequest(fakeServer, config);
    }
}

// Server reads the whole request then closes without responding.
@Test(expectedExceptions = IOException.class)
public void testConnectReadRequestClose()
        throws Exception
{
    try (FakeServer fakeServer = new FakeServer(scheme, host, Long.MAX_VALUE, null, true)) {
        HttpClientConfig config = createClientConfig();
        config.setConnectTimeout(new Duration(5, SECONDS));
        config.setIdleTimeout(new Duration(5, SECONDS));
        executeRequest(fakeServer, config);
    }
}

// Server replies with bytes that are not valid HTTP, then hangs up.
@Test(expectedExceptions = Exception.class)
public void testConnectReadRequestWriteJunkHangup()
        throws Exception
{
    try (FakeServer fakeServer = new FakeServer(scheme, host, 10, "THIS\nIS\nJUNK\n\n".getBytes(), false)) {
        HttpClientConfig config = createClientConfig();
        config.setConnectTimeout(new Duration(5, SECONDS));
        config.setIdleTimeout(new Duration(5, SECONDS));
        executeRequest(fakeServer, config);
    }
}

// Verifies that an Error (undeclared throwable) thrown by a response
// handler propagates out of the client instead of being wrapped/swallowed.
@Test(expectedExceptions = CustomError.class)
public void testHandlesUndeclaredThrowable()
        throws Exception
{
    Request request = prepareGet()
            .setUri(baseURI)
            .build();
    executeRequest(request, new ThrowErrorResponseHandler());
}
/**
 * Executes the request expecting it to fail. The failure is tunneled out of
 * the client as a {@link CapturedException}; its cause is then re-thrown to
 * the caller (directly when it is an {@code Exception}, otherwise wrapped in
 * a {@code RuntimeException}). Fails the test if the request succeeds.
 */
private void executeExceptionRequest(HttpClientConfig config, Request request)
        throws Exception
{
    CapturedException captured;
    try {
        executeRequest(config, request, new CaptureExceptionResponseHandler());
        fail("expected exception");
        return; // unreachable: fail() always throws
    }
    catch (CapturedException e) {
        captured = e;
    }
    Throwable cause = captured.getCause();
    propagateIfPossible(cause, Exception.class);
    throw new RuntimeException(cause);
}
// Runs the given FakeServer on the shared executor, issues a GET against it,
// and asserts that the client gives up within one second — the request is
// always expected to fail (ExceptionResponseHandler rethrows everything).
private void executeRequest(FakeServer fakeServer, HttpClientConfig config)
        throws Exception
{
    // kick the fake server
    executor.execute(fakeServer);
    // timing based check to assure we don't hang
    long start = System.nanoTime();
    try {
        Request request = prepareGet()
                .setUri(fakeServer.getUri())
                .build();
        executeRequest(config, request, new ExceptionResponseHandler());
    }
    finally {
        assertLessThan(nanosSince(start), new Duration(1, SECONDS), "Expected request to finish quickly");
    }
}
/**
 * Minimal misbehaving HTTP server for failure-mode tests. Accepts exactly one
 * connection, optionally reads up to {@code readBytes} bytes of the request,
 * optionally writes {@code writeBuffer} back, and optionally closes the
 * connection immediately afterwards. Run it on an executor via {@link #run()}.
 */
private static class FakeServer
        implements Closeable, Runnable
{
    private final ServerSocket serverSocket;
    // number of request bytes to consume before responding/closing
    private final long readBytes;
    // raw bytes to write back to the client, or null for no response
    private final byte[] writeBuffer;
    private final boolean closeConnectionImmediately;
    // the single accepted connection, published for close()
    private final AtomicReference<Socket> connectionSocket = new AtomicReference<>();
    private final String scheme;
    private final String host;

    private FakeServer(String scheme, String host, long readBytes, byte[] writeBuffer, boolean closeConnectionImmediately)
            throws Exception
    {
        this.scheme = scheme;
        this.host = host;
        this.writeBuffer = writeBuffer;
        this.readBytes = readBytes;
        // port 0: let the OS pick a free port; backlog 50
        this.serverSocket = new ServerSocket(0, 50, InetAddress.getByName(host));
        this.closeConnectionImmediately = closeConnectionImmediately;
    }

    /**
     * Returns the URI clients should connect to (reflects the OS-assigned port).
     */
    public URI getUri()
    {
        try {
            return new URI(scheme, null, host, serverSocket.getLocalPort(), "/", null, null);
        }
        catch (URISyntaxException e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public void run()
    {
        try {
            Socket connectionSocket = serverSocket.accept();
            this.connectionSocket.set(connectionSocket);
            if (readBytes > 0) {
                // short SO_TIMEOUT so a stalled client ends the read phase quickly
                connectionSocket.setSoTimeout(5);
                long bytesRead = 0;
                try {
                    InputStream inputStream = connectionSocket.getInputStream();
                    while (bytesRead < readBytes) {
                        // BUGFIX: check for end-of-stream. Previously the -1
                        // return of read() was ignored, so a client that closed
                        // its side caused a busy loop counting phantom bytes
                        // (up to readBytes iterations, e.g. Long.MAX_VALUE).
                        if (inputStream.read() == -1) {
                            break;
                        }
                        bytesRead++;
                    }
                }
                catch (SocketTimeoutException ignored) {
                    // client stopped sending: proceed with whatever was read
                }
            }
            if (writeBuffer != null) {
                connectionSocket.getOutputStream().write(writeBuffer);
            }
            // todo sleep here maybe
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        finally {
            if (closeConnectionImmediately) {
                closeQuietly(connectionSocket.get());
            }
        }
    }

    @Override
    public void close()
            throws IOException
    {
        closeQuietly(connectionSocket.get());
        serverSocket.close();
    }
}
// Handler that rethrows any transport exception verbatim and rejects being
// called with an actual response (used by tests that expect failure only).
public static class ExceptionResponseHandler
        implements ResponseHandler<Void, Exception>
{
    @Override
    public Void handleException(Request request, Exception exception)
            throws Exception
    {
        throw exception;
    }

    @Override
    public Void handle(Request request, Response response)
            throws Exception
    {
        // tests using this handler must never receive a response
        throw new UnsupportedOperationException();
    }
}

// Handler that returns the raw Response object unchanged, propagating any
// transport exception via the standard helper.
private static class PassThroughResponseHandler
        implements ResponseHandler<Response, RuntimeException>
{
    @Override
    public Response handleException(Request request, Exception exception)
    {
        throw ResponseHandlerUtils.propagate(request, exception);
    }

    @Override
    public Response handle(Request request, Response response)
    {
        return response;
    }
}

// Handler that returns the status code if it matches the expected value and
// throws UnexpectedResponseException otherwise.
private static class UnexpectedResponseStatusCodeHandler
        implements ResponseHandler<Integer, RuntimeException>
{
    private final int expectedStatusCode;

    UnexpectedResponseStatusCodeHandler(int expectedStatusCode)
    {
        this.expectedStatusCode = expectedStatusCode;
    }

    @Override
    public Integer handleException(Request request, Exception exception)
    {
        throw ResponseHandlerUtils.propagate(request, exception);
    }

    @Override
    public Integer handle(Request request, Response response)
            throws RuntimeException
    {
        if (response.getStatusCode() != expectedStatusCode) {
            throw new UnexpectedResponseException(request, response);
        }
        return response.getStatusCode();
    }
}
// Handler that tunnels any transport exception out as a CapturedException
// (so callers can inspect the original cause) and ignores successful
// responses by returning null.
public static class CaptureExceptionResponseHandler
        implements ResponseHandler<String, CapturedException>
{
    @Override
    public String handleException(Request request, Exception exception)
            throws CapturedException
    {
        throw new CapturedException(exception);
    }

    @Override
    public String handle(Request request, Response response)
    {
        return null;
    }
}

// Handler that throws an Error (not an Exception) on success, used to verify
// that undeclared throwables propagate through the client untouched.
public static class ThrowErrorResponseHandler
        implements ResponseHandler<String, Exception>
{
    @Override
    public String handleException(Request request, Exception exception)
    {
        throw new UnsupportedOperationException("not yet implemented", exception);
    }

    @Override
    public String handle(Request request, Response response)
    {
        throw new CustomError();
    }
}

// Marker Error type for testHandlesUndeclaredThrowable.
private static class CustomError
        extends Error
{
}

// Checked wrapper used by CaptureExceptionResponseHandler to tunnel the
// original exception out of the client; the cause is the real failure.
public static class CapturedException
        extends Exception
{
    public CapturedException(Exception exception)
    {
        super(exception);
    }
}
private class DefaultOnExceptionResponseHandler
implements ResponseHandler<Object, RuntimeException>
{
private final Object defaultObject;
public DefaultOnExceptionResponseHandler(Object defaultObject)
{
this.defaultObject = defaultObject;
}
@Override
public Object handleException(Request request, Exception exception)
throws RuntimeException
{
return defaultObject;
}
@Override
public Object handle(Request request, Response response)
throws RuntimeException
{
throw new UnsupportedOperationException();
}
}
/**
 * Returns a TCP port that was free at the moment of the call, by binding an
 * ephemeral-port server socket and closing it again. Note the usual race:
 * the port may be taken by another process before the caller binds it.
 */
private static int findUnusedPort()
        throws IOException
{
    ServerSocket socket = new ServerSocket(0);
    try {
        return socket.getLocalPort();
    }
    finally {
        socket.close();
    }
}
// Server whose TCP accept backlog is deliberately saturated, so further
// connection attempts time out — used to test client connect-timeout
// handling. The constructor fills the backlog with throwaway client
// sockets; on systems whose effective minimum backlog is very large the
// test is skipped rather than opening unbounded sockets.
@SuppressWarnings("SocketOpenedButNotSafelyClosed")
private static class BackloggedServer
        implements Closeable
{
    // kept so close() can release every probe connection we opened
    private final List<Socket> clientSockets = new ArrayList<>();
    private final ServerSocket serverSocket;
    private final SocketAddress localSocketAddress;

    private BackloggedServer()
            throws IOException
    {
        // backlog hint of 1; never accept(), so the queue fills up
        this.serverSocket = new ServerSocket(0, 1, InetAddress.getByName("127.0.0.1"));
        localSocketAddress = serverSocket.getLocalSocketAddress();
        // some systems like Linux have a large minimum backlog
        int i = 0;
        while (i <= 256) {
            if (!connect()) {
                return;
            }
            i++;
        }
        throw new SkipException(format("socket backlog is too large (%s connections accepted)", i));
    }

    @Override
    public void close()
    {
        clientSockets.forEach(Closeables::closeQuietly);
        closeQuietly(serverSocket);
    }

    private int getPort()
    {
        return serverSocket.getLocalPort();
    }

    // Attempts one probe connection with a 5ms timeout; returns false once
    // the backlog is full (connect times out or is refused).
    private boolean connect()
            throws IOException
    {
        Socket socket = new Socket();
        clientSockets.add(socket);
        try {
            socket.connect(localSocketAddress, 5);
            return true;
        }
        catch (IOException e) {
            if (isConnectTimeout(e)) {
                return false;
            }
            throw e;
        }
    }
}
/**
 * Asserts that the given host name does not resolve in this environment.
 * Fails the test if name resolution unexpectedly succeeds.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
private static void assertUnknownHost(String host)
{
    boolean resolved;
    try {
        InetAddress.getByName(host);
        resolved = true;
    }
    catch (UnknownHostException expected) {
        resolved = false;
    }
    if (resolved) {
        fail("Expected UnknownHostException for host " + host);
    }
}

/**
 * Returns true when the throwable indicates a connect timeout. Treats
 * SocketException the same way because Linux refuses connections
 * immediately rather than queuing them.
 */
private static boolean isConnectTimeout(Throwable t)
{
    if (t instanceof SocketTimeoutException) {
        return true;
    }
    return t instanceof SocketException;
}
// Executes a request asynchronously and blocks for the result, unwrapping
// ExecutionException so callers see the handler's declared exception type E
// (or the original unchecked throwable) rather than a wrapped future error.
public static <T, E extends Exception> T executeAsync(JettyHttpClient client, Request request, ResponseHandler<T, E> responseHandler)
        throws E
{
    HttpResponseFuture<T> future = null;
    try {
        future = client.executeAsync(request, responseHandler);
    }
    catch (Exception e) {
        // submitting the request must never throw synchronously
        fail("Unexpected exception", e);
    }
    try {
        return future.get();
    }
    catch (InterruptedException e) {
        // restore the interrupt flag before converting to unchecked
        currentThread().interrupt();
        throw new RuntimeException(e);
    }
    catch (ExecutionException e) {
        throwIfUnchecked(e.getCause());
        if (e.getCause() instanceof Exception) {
            // the HTTP client and ResponseHandler interface enforces this
            throw AbstractHttpClientTest.<E>castThrowable(e.getCause());
        }
        // e.getCause() is some direct subclass of throwable
        throw new RuntimeException(e.getCause());
    }
}

// Unchecked-cast helper for the exception tunneling above; safe only
// because the ResponseHandler contract restricts the thrown type to E.
@SuppressWarnings("unchecked")
private static <E extends Exception> E castThrowable(Throwable t)
{
    return (E) t;
}
}
|
|
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import groovy.lang.Closure;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.groovy.GroovyBeanDefinitionReader;
import org.springframework.beans.factory.support.BeanDefinitionReader;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanNameGenerator;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.context.annotation.AnnotatedBeanDefinitionReader;
import org.springframework.context.annotation.ClassPathBeanDefinitionScanner;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.core.type.filter.AbstractTypeHierarchyTraversingFilter;
import org.springframework.core.type.filter.TypeFilter;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
/**
* Loads bean definitions from underlying sources, including XML and JavaConfig. Acts as a
* simple facade over {@link AnnotatedBeanDefinitionReader},
* {@link XmlBeanDefinitionReader} and {@link ClassPathBeanDefinitionScanner}. See
* {@link SpringApplication} for the types of sources that are supported.
*
* @author Phillip Webb
* @see #setBeanNameGenerator(BeanNameGenerator)
*/
class BeanDefinitionLoader {
private final Object[] sources;
private final AnnotatedBeanDefinitionReader annotatedReader;
private final XmlBeanDefinitionReader xmlReader;
private BeanDefinitionReader groovyReader;
private final ClassPathBeanDefinitionScanner scanner;
private ResourceLoader resourceLoader;
/**
* Create a new {@link BeanDefinitionLoader} that will load beans into the specified
* {@link BeanDefinitionRegistry}.
* @param registry the bean definition registry that will contain the loaded beans
* @param sources the bean sources
*/
BeanDefinitionLoader(BeanDefinitionRegistry registry, Object... sources) {
Assert.notNull(registry, "Registry must not be null");
Assert.notEmpty(sources, "Sources must not be empty");
this.sources = sources;
this.annotatedReader = new AnnotatedBeanDefinitionReader(registry);
this.xmlReader = new XmlBeanDefinitionReader(registry);
if (isGroovyPresent()) {
this.groovyReader = new GroovyBeanDefinitionReader(registry);
}
this.scanner = new ClassPathBeanDefinitionScanner(registry);
this.scanner.addExcludeFilter(new ClassExcludeFilter(sources));
}
/**
* Set the bean name generator to be used by the underlying readers and scanner.
* @param beanNameGenerator the bean name generator
*/
public void setBeanNameGenerator(BeanNameGenerator beanNameGenerator) {
this.annotatedReader.setBeanNameGenerator(beanNameGenerator);
this.xmlReader.setBeanNameGenerator(beanNameGenerator);
this.scanner.setBeanNameGenerator(beanNameGenerator);
}
/**
* Set the resource loader to be used by the underlying readers and scanner.
* @param resourceLoader the resource loader
*/
public void setResourceLoader(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
this.xmlReader.setResourceLoader(resourceLoader);
this.scanner.setResourceLoader(resourceLoader);
}
/**
* Set the environment to be used by the underlying readers and scanner.
* @param environment the environment
*/
public void setEnvironment(ConfigurableEnvironment environment) {
this.annotatedReader.setEnvironment(environment);
this.xmlReader.setEnvironment(environment);
this.scanner.setEnvironment(environment);
}
/**
* Load the sources into the reader.
* @return the number of loaded beans
*/
public int load() {
int count = 0;
for (Object source : this.sources) {
count += load(source);
}
return count;
}
private int load(Object source) {
Assert.notNull(source, "Source must not be null");
if (source instanceof Class<?>) {
return load((Class<?>) source);
}
if (source instanceof Resource) {
return load((Resource) source);
}
if (source instanceof Package) {
return load((Package) source);
}
if (source instanceof CharSequence) {
return load((CharSequence) source);
}
throw new IllegalArgumentException("Invalid source type " + source.getClass());
}
private int load(Class<?> source) {
if (isGroovyPresent()) {
// Any GroovyLoaders added in beans{} DSL can contribute beans here
if (GroovyBeanDefinitionSource.class.isAssignableFrom(source)) {
GroovyBeanDefinitionSource loader = BeanUtils.instantiateClass(source,
GroovyBeanDefinitionSource.class);
load(loader);
}
}
if (isComponent(source)) {
this.annotatedReader.register(source);
return 1;
}
return 0;
}
private int load(GroovyBeanDefinitionSource source) {
int before = this.xmlReader.getRegistry().getBeanDefinitionCount();
((GroovyBeanDefinitionReader) this.groovyReader).beans(source.getBeans());
int after = this.xmlReader.getRegistry().getBeanDefinitionCount();
return after - before;
}
private int load(Resource source) {
if (source.getFilename().endsWith(".groovy")) {
if (this.groovyReader == null) {
throw new BeanDefinitionStoreException(
"Cannot load Groovy beans without Groovy on classpath");
}
return this.groovyReader.loadBeanDefinitions(source);
}
return this.xmlReader.loadBeanDefinitions(source);
}
private int load(Package source) {
return this.scanner.scan(source.getName());
}
private int load(CharSequence source) {
String resolvedSource = this.xmlReader.getEnvironment()
.resolvePlaceholders(source.toString());
// Attempt as a Class
try {
return load(ClassUtils.forName(resolvedSource, null));
}
catch (IllegalArgumentException | ClassNotFoundException ex) {
// swallow exception and continue
}
// Attempt as resources
Resource[] resources = findResources(resolvedSource);
int loadCount = 0;
boolean atLeastOneResourceExists = false;
for (Resource resource : resources) {
if (isLoadCandidate(resource)) {
atLeastOneResourceExists = true;
loadCount += load(resource);
}
}
if (atLeastOneResourceExists) {
return loadCount;
}
// Attempt as package
Package packageResource = findPackage(resolvedSource);
if (packageResource != null) {
return load(packageResource);
}
throw new IllegalArgumentException("Invalid source '" + resolvedSource + "'");
}
private boolean isGroovyPresent() {
return ClassUtils.isPresent("groovy.lang.MetaClass", null);
}
private Resource[] findResources(String source) {
ResourceLoader loader = (this.resourceLoader != null ? this.resourceLoader
: new PathMatchingResourcePatternResolver());
try {
if (loader instanceof ResourcePatternResolver) {
return ((ResourcePatternResolver) loader).getResources(source);
}
return new Resource[] { loader.getResource(source) };
}
catch (IOException ex) {
throw new IllegalStateException("Error reading source '" + source + "'");
}
}
private boolean isLoadCandidate(Resource resource) {
if (resource == null || !resource.exists()) {
return false;
}
if (resource instanceof ClassPathResource) {
// A simple package without a '.' may accidentally get loaded as an XML
// document if we're not careful. The result of getInputStream() will be
// a file list of the package content. We double check here that it's not
// actually a package.
String path = ((ClassPathResource) resource).getPath();
if (path.indexOf(".") == -1) {
try {
return Package.getPackage(path) == null;
}
catch (Exception ex) {
// Ignore
}
}
}
return true;
}
/**
 * Attempts to resolve the given source as a {@link Package}.
 * <p>
 * {@link Package#getPackage(String)} only knows about packages the class loader
 * has already defined, so when the direct lookup fails this method loads a
 * single class from the package (found by scanning for {@code *.class}
 * resources) to force the package definition, then retries the lookup.
 *
 * @param source the candidate package name
 * @return the resolved package, or {@code null} if none could be found
 */
private Package findPackage(CharSequence source) {
Package pkg = Package.getPackage(source.toString());
if (pkg != null) {
return pkg;
}
try {
// Attempt to find a class in this package
ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(
getClass().getClassLoader());
Resource[] resources = resolver.getResources(
ClassUtils.convertClassNameToResourcePath(source.toString())
+ "/*.class");
for (Resource resource : resources) {
String className = StringUtils
.stripFilenameExtension(resource.getFilename());
// Loading one class is enough to make the package visible; stop after the first.
load(Class.forName(source.toString() + "." + className));
break;
}
}
catch (Exception ex) {
// swallow exception and continue — this is best-effort; the retry below decides.
}
return Package.getPackage(source.toString());
}
/**
 * Best-effort guess at whether the given class could be registered as a bean.
 * A type is considered eligible when it carries {@code @Component} (directly or
 * meta-annotated), or when it is a regular class — not anonymous, not a Groovy
 * closure — with at least one public constructor.
 */
private boolean isComponent(Class<?> type) {
    // This has to be a bit of a guess. The only way to be sure that this type is
    // eligible is to make a bean definition out of it and try to instantiate it.
    if (AnnotationUtils.findAnnotation(type, Component.class) != null) {
        return true;
    }
    // Nested anonymous classes are not eligible for registration.
    if (type.isAnonymousClass()) {
        return false;
    }
    // Neither are groovy closures (their synthetic names contain "$_...closure").
    if (type.getName().matches(".*\\$_.*closure.*")) {
        return false;
    }
    // Finally, require at least one public constructor to instantiate from.
    return type.getConstructors() != null && type.getConstructors().length > 0;
}
/**
 * Simple {@link TypeFilter} used to ensure that specified {@link Class} sources are
 * not accidentally re-added during scanning.
 */
private static class ClassExcludeFilter
extends AbstractTypeHierarchyTraversingFilter {
// Fully-qualified names of the explicitly registered classes to exclude.
private final Set<String> classNames = new HashSet<>();
ClassExcludeFilter(Object... sources) {
// super(false, false): match on the class name itself only — TODO confirm the
// two flags' meaning against AbstractTypeHierarchyTraversingFilter.
super(false, false);
// Non-Class sources (resources, package names) are simply ignored here.
for (Object source : sources) {
if (source instanceof Class<?>) {
this.classNames.add(((Class<?>) source).getName());
}
}
}
@Override
protected boolean matchClassName(String className) {
return this.classNames.contains(className);
}
}
/**
 * Source for Bean definitions defined in Groovy.
 */
@FunctionalInterface
protected interface GroovyBeanDefinitionSource {
// The Groovy closure that defines the beans (the DSL's beans { ... } block).
Closure<?> getBeans();
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.project.Project;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import static com.intellij.util.ObjectUtils.notNull;
/**
* Service for validating and parsing Java identifiers.
*/
public abstract class PsiNameHelper {
// Project-scoped service lookup.
public static PsiNameHelper getInstance(Project project) {
return ServiceManager.getService(project, PsiNameHelper.class);
}
/**
 * Checks if the specified text is a Java identifier, using the language level of the project
 * with which the name helper is associated to filter out keywords.
 *
 * @param text the text to check.
 * @return true if the text is an identifier, false otherwise
 */
public abstract boolean isIdentifier(@Nullable String text);
/**
 * Checks if the specified text is a Java identifier, using the specified language level
 * with which the name helper is associated to filter out keywords.
 *
 * @param text the text to check.
 * @param languageLevel to check text against. For instance 'assert' or 'enum' might or might not be identifiers depending on language level
 * @return true if the text is an identifier, false otherwise
 */
public abstract boolean isIdentifier(@Nullable String text, @NotNull LanguageLevel languageLevel);
/**
 * Checks if the specified text is a Java keyword, using the language level of the project
 * with which the name helper is associated.
 *
 * @param text the text to check.
 * @return true if the text is a keyword, false otherwise
 */
public abstract boolean isKeyword(@Nullable String text);
/**
 * Checks if the specified string is a qualified name (sequence of identifiers separated by
 * periods).
 *
 * @param text the text to check.
 * @return true if the text is a qualified name, false otherwise.
 */
public abstract boolean isQualifiedName(@Nullable String text);
// Extracts the short (unqualified) class name from reference text, e.g.
// "java.util.List<java.lang.String>" -> "List". Scans backwards from the end,
// skipping over a trailing type-argument list / parenthesized part (tracked by
// bracesBalance) until the qualifier separator ('.' or '@') at the top nesting
// level is found.
@NotNull
public static String getShortClassName(@NotNull String referenceText) {
// Exclusive end offset of the short name; pulled back when '<'/'(' or
// trailing whitespace is seen.
int lessPos = referenceText.length();
int bracesBalance = 0;
int i;
loop:
for (i = referenceText.length() - 1; i >= 0; i--) {
char ch = referenceText.charAt(i);
switch (ch) {
case ')':
case '>':
bracesBalance++;
break;
case '(':
case '<':
bracesBalance--;
lessPos = i;
break;
case '@':
case '.':
// A separator at top level terminates the scan; the name starts after it.
if (bracesBalance <= 0) break loop;
break;
default:
// Top-level whitespace: if everything between here and lessPos is also
// whitespace it is trailing and gets trimmed by moving lessPos; otherwise
// the whitespace delimits the name and the scan stops.
if (Character.isWhitespace(ch) && bracesBalance <= 0) {
for (int j = i + 1; j < lessPos; j++) {
if (!Character.isWhitespace(referenceText.charAt(j))) break loop;
}
lessPos = i;
}
}
}
return referenceText.substring(i + 1, lessPos).trim();
}
@NotNull
public static String getPresentableText(@NotNull PsiJavaCodeReferenceElement ref) {
String name = ref.getReferenceName();
PsiAnnotation[] annotations = PsiTreeUtil.getChildrenOfType(ref, PsiAnnotation.class);
return getPresentableText(name, notNull(annotations, PsiAnnotation.EMPTY_ARRAY), ref.getTypeParameters());
}
// Formats "@Anno Name<Arg1, Arg2>" in presentable (non-canonical) form.
@NotNull
public static String getPresentableText(@Nullable String refName, @NotNull PsiAnnotation[] annotations, @NotNull PsiType[] types) {
if (types.length == 0 && annotations.length == 0) {
return refName != null ? refName : "";
}
StringBuilder buffer = new StringBuilder();
appendAnnotations(buffer, annotations, false);
buffer.append(refName);
appendTypeArgs(buffer, types, false, true);
return buffer.toString();
}
// Strips type-argument lists (and optionally whitespace/comments) from a
// reference text, e.g. "Map<String, List<Integer>>" -> "Map".
@NotNull
public static String getQualifiedClassName(@NotNull String referenceText, boolean removeWhitespace) {
if (removeWhitespace) {
referenceText = removeWhitespace(referenceText);
}
if (referenceText.indexOf('<') < 0) return referenceText;
final StringBuilder buffer = new StringBuilder(referenceText.length());
final char[] chars = referenceText.toCharArray();
// gtPos: start of the next segment to copy (just past the last '>').
int gtPos = 0;
// count: current '<'/'>' nesting depth.
int count = 0;
for (int i = 0; i < chars.length; i++) {
final char aChar = chars[i];
switch (aChar) {
case '<':
count++;
// Entering an outermost '<': copy the text accumulated before it.
if (count == 1) buffer.append(new String(chars, gtPos, i - gtPos));
break;
case '>':
count--;
gtPos = i + 1;
break;
}
}
if (count == 0) {
// Balanced brackets: append the tail after the last '>'.
buffer.append(new String(chars, gtPos, chars.length - gtPos));
}
return buffer.toString();
}
// Matches whitespace, /* ... */ block comments and // line comments.
// NOTE(review): despite the method name below, this also strips comments; the
// greedy ".*" in the block-comment alternative can over-strip when two block
// comments share one line — confirm whether inputs can contain such text.
private static final Pattern WHITESPACE_PATTERN = Pattern.compile("(?:\\s)|(?:/\\*.*\\*/)|(?://[^\\n]*)");
private static String removeWhitespace(@NotNull String referenceText) {
// Fast path: skip the regex entirely when no whitespace or '/' is present.
boolean needsChange = false;
for (int i = 0; i < referenceText.length(); i++) {
char c = referenceText.charAt(i);
if (c == '/' || Character.isWhitespace(c)) {
needsChange = true;
break;
}
}
if (!needsChange) return referenceText;
return WHITESPACE_PATTERN.matcher(referenceText).replaceAll("");
}
/**
 * Obtains text of all type parameter values in a reference.
 * They go in left-to-right order: {@code A<List<String>, B<Integer>>} yields
 * {@code ["List<String>", "B<Integer>"]}. Parameters of the outer reference are ignored:
 * {@code A<List<String>>.B<Integer>} yields {@code ["Integer"]}
 *
 * @param referenceText the text of the reference to calculate type parameters for.
 * @return the calculated array of type parameters.
 */
@NotNull
public static String[] getClassParametersText(@NotNull String referenceText) {
if (referenceText.indexOf('<') < 0) return ArrayUtil.EMPTY_STRING_ARRAY;
final char[] chars = referenceText.toCharArray();
// Pass 1: find the start of the last top-level segment (after the last '.'
// at nesting level 0) so parameters of outer references are ignored.
int afterLastDotIndex = 0;
int level = 0;
for (int i = 0; i < chars.length; i++) {
char aChar = chars[i];
switch (aChar) {
case '<':
level++;
break;
case '.':
if (level == 0) afterLastDotIndex = i + 1;
break;
case '>':
level--;
break;
}
}
// Unbalanced angle brackets: give up.
if (level != 0) return ArrayUtil.EMPTY_STRING_ARRAY;
// Pass 2: count the top-level type arguments (dim) of that last segment.
int dim = 0;
for (int i = afterLastDotIndex; i < chars.length; i++) {
char aChar = chars[i];
switch (aChar) {
case '<':
level++;
if (level == 1) dim++;
break;
case ',':
if (level == 1) dim++;
break;
case '>':
level--;
break;
}
}
if (level != 0 || dim == 0) return ArrayUtil.EMPTY_STRING_ARRAY;
// Pass 3: extract the argument substrings between the top-level delimiters.
final String[] result = new String[dim];
dim = 0;
int ltPos = 0;
for (int i = afterLastDotIndex; i < chars.length; i++) {
final char aChar = chars[i];
switch (aChar) {
case '<':
level++;
if (level == 1) ltPos = i;
break;
case ',':
if (level == 1) {
result[dim++] = new String(chars, ltPos + 1, i - ltPos - 1);
ltPos = i;
}
break;
case '>':
level--;
if (level == 0) result[dim++] = new String(chars, ltPos + 1, i - ltPos - 1);
break;
}
}
return result;
}
// True when subpackageName equals packageName or lies strictly beneath it
// (the common prefix must be followed by a '.').
public static boolean isSubpackageOf(@NotNull String subpackageName, @NotNull String packageName) {
return subpackageName.equals(packageName) ||
subpackageName.startsWith(packageName) && subpackageName.charAt(packageName.length()) == '.';
}
// Appends "<T1, T2, ...>" to sb; canonical form uses no space after commas.
public static void appendTypeArgs(@NotNull StringBuilder sb, @NotNull PsiType[] types, boolean canonical, boolean annotated) {
if (types.length == 0) return;
sb.append('<');
for (int i = 0; i < types.length; i++) {
if (i > 0) {
sb.append(canonical ? "," : ", ");
}
PsiType type = types[i];
if (canonical) {
sb.append(type.getCanonicalText(annotated));
}
else {
sb.append(type.getPresentableText());
}
}
sb.append('>');
}
public static boolean appendAnnotations(@NotNull StringBuilder sb, @NotNull PsiAnnotation[] annotations, boolean canonical) {
return appendAnnotations(sb, Arrays.asList(annotations), canonical);
}
// Appends "@Name(args) " for each annotation; returns whether anything was written.
public static boolean appendAnnotations(@NotNull StringBuilder sb, @NotNull List<PsiAnnotation> annotations, boolean canonical) {
boolean updated = false;
for (PsiAnnotation annotation : annotations) {
if (canonical) {
// Canonical form: fully-qualified name plus the literal parameter list text.
String name = annotation.getQualifiedName();
if (name != null) {
sb.append('@').append(name).append(annotation.getParameterList().getText()).append(' ');
updated = true;
}
}
else {
// Presentable form: whatever the source reference text is.
PsiJavaCodeReferenceElement refElement = annotation.getNameReferenceElement();
if (refElement != null) {
sb.append('@').append(refElement.getText()).append(' ');
updated = true;
}
}
}
return updated;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle.unsafe;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import scala.Tuple2;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.spark.SparkConf;
import org.apache.spark.TaskContext;
import org.apache.spark.executor.ShuffleWriteMetrics;
import org.apache.spark.serializer.SerializerInstance;
import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.storage.*;
import org.apache.spark.unsafe.PlatformDependent;
import org.apache.spark.unsafe.memory.MemoryBlock;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
import org.apache.spark.util.Utils;
/**
* An external sorter that is specialized for sort-based shuffle.
* <p>
* Incoming records are appended to data pages. When all records have been inserted (or when the
* current thread's shuffle memory limit is reached), the in-memory records are sorted according to
* their partition ids (using a {@link UnsafeShuffleInMemorySorter}). The sorted records are then
* written to a single output file (or multiple files, if we've spilled). The format of the output
* files is the same as the format of the final output file written by
* {@link org.apache.spark.shuffle.sort.SortShuffleWriter}: each output partition's records are
* written as a single serialized, compressed stream that can be read with a new decompression and
* deserialization stream.
* <p>
* Unlike {@link org.apache.spark.util.collection.ExternalSorter}, this sorter does not merge its
* spill files. Instead, this merging is performed in {@link UnsafeShuffleWriter}, which uses a
* specialized merge procedure that avoids extra serialization/deserialization.
*/
final class UnsafeShuffleExternalSorter {
private final Logger logger = LoggerFactory.getLogger(UnsafeShuffleExternalSorter.class);
// Fixed size of every allocated data page.
private static final int PAGE_SIZE = PackedRecordPointer.MAXIMUM_PAGE_SIZE_BYTES;
@VisibleForTesting
static final int DISK_WRITE_BUFFER_SIZE = 1024 * 1024;
// Largest record that fits in a page after the 4-byte length prefix written by insertRecord().
@VisibleForTesting
static final int MAX_RECORD_SIZE = PAGE_SIZE - 4;
private final int initialSize;
private final int numPartitions;
private final TaskMemoryManager memoryManager;
private final ShuffleMemoryManager shuffleMemoryManager;
private final BlockManager blockManager;
private final TaskContext taskContext;
private final ShuffleWriteMetrics writeMetrics;
/** The buffer size to use when writing spills using DiskBlockObjectWriter */
private final int fileBufferSizeBytes;
/**
 * Memory pages that hold the records being sorted. The pages in this list are freed when
 * spilling, although in principle we could recycle these pages across spills (on the other hand,
 * this might not be necessary if we maintained a pool of re-usable pages in the TaskMemoryManager
 * itself).
 */
private final LinkedList<MemoryBlock> allocatedPages = new LinkedList<MemoryBlock>();
// Metadata for every spill file written so far.
private final LinkedList<SpillInfo> spills = new LinkedList<SpillInfo>();
// These variables are reset after spilling:
private UnsafeShuffleInMemorySorter sorter;
private MemoryBlock currentPage = null;
private long currentPagePosition = -1;
private long freeSpaceInCurrentPage = 0;
public UnsafeShuffleExternalSorter(
TaskMemoryManager memoryManager,
ShuffleMemoryManager shuffleMemoryManager,
BlockManager blockManager,
TaskContext taskContext,
int initialSize,
int numPartitions,
SparkConf conf,
ShuffleWriteMetrics writeMetrics) throws IOException {
this.memoryManager = memoryManager;
this.shuffleMemoryManager = shuffleMemoryManager;
this.blockManager = blockManager;
this.taskContext = taskContext;
this.initialSize = initialSize;
this.numPartitions = numPartitions;
// Use getSizeAsKb (not bytes) to maintain backwards compatibility if no units are provided
this.fileBufferSizeBytes = (int) conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024;
this.writeMetrics = writeMetrics;
initializeForWriting();
}
/**
 * Allocates new sort data structures. Called when creating the sorter and after each spill.
 */
private void initializeForWriting() throws IOException {
// TODO: move this sizing calculation logic into a static method of sorter:
// 8 bytes per entry — presumably one packed long pointer per record; confirm
// against UnsafeShuffleInMemorySorter. The request must be granted in full.
final long memoryRequested = initialSize * 8L;
final long memoryAcquired = shuffleMemoryManager.tryToAcquire(memoryRequested);
if (memoryAcquired != memoryRequested) {
shuffleMemoryManager.release(memoryAcquired);
throw new IOException("Could not acquire " + memoryRequested + " bytes of memory");
}
this.sorter = new UnsafeShuffleInMemorySorter(initialSize);
}
/**
 * Sorts the in-memory records and writes the sorted records to an on-disk file.
 * This method does not free the sort data structures.
 *
 * @param isLastFile if true, this indicates that we're writing the final output file and that the
 *                   bytes written should be counted towards shuffle spill metrics rather than
 *                   shuffle write metrics.
 */
private void writeSortedFile(boolean isLastFile) throws IOException {
final ShuffleWriteMetrics writeMetricsToUse;
if (isLastFile) {
// We're writing the final non-spill file, so we _do_ want to count this as shuffle bytes.
writeMetricsToUse = writeMetrics;
} else {
// We're spilling, so bytes written should be counted towards spill rather than write.
// Create a dummy WriteMetrics object to absorb these metrics, since we don't want to count
// them towards shuffle bytes written.
writeMetricsToUse = new ShuffleWriteMetrics();
}
// This call performs the actual sort.
final UnsafeShuffleInMemorySorter.UnsafeShuffleSorterIterator sortedRecords =
sorter.getSortedIterator();
// Currently, we need to open a new DiskBlockObjectWriter for each partition; we can avoid this
// after SPARK-5581 is fixed.
BlockObjectWriter writer;
// Small writes to DiskBlockObjectWriter will be fairly inefficient. Since there doesn't seem to
// be an API to directly transfer bytes from managed memory to the disk writer, we buffer
// data through a byte array. This array does not need to be large enough to hold a single
// record;
final byte[] writeBuffer = new byte[DISK_WRITE_BUFFER_SIZE];
// Because this output will be read during shuffle, its compression codec must be controlled by
// spark.shuffle.compress instead of spark.shuffle.spill.compress, so we need to use
// createTempShuffleBlock here; see SPARK-3426 for more details.
final Tuple2<TempShuffleBlockId, File> spilledFileInfo =
blockManager.diskBlockManager().createTempShuffleBlock();
final File file = spilledFileInfo._2();
final TempShuffleBlockId blockId = spilledFileInfo._1();
final SpillInfo spillInfo = new SpillInfo(numPartitions, file, blockId);
// Unfortunately, we need a serializer instance in order to construct a DiskBlockObjectWriter.
// Our write path doesn't actually use this serializer (since we end up calling the `write()`
// OutputStream methods), but DiskBlockObjectWriter still calls some methods on it. To work
// around this, we pass a dummy no-op serializer.
final SerializerInstance ser = DummySerializerInstance.INSTANCE;
// NOTE(review): this initial writer is replaced on the first partition switch
// below without being committed or closed (currentPartition is -1 on the first
// record, so the commit branch is skipped). Looks like the first
// DiskBlockObjectWriter is leaked whenever at least one record exists —
// confirm, and consider opening the writer lazily inside the loop.
writer = blockManager.getDiskWriter(blockId, file, ser, fileBufferSizeBytes, writeMetricsToUse);
int currentPartition = -1;
while (sortedRecords.hasNext()) {
sortedRecords.loadNext();
final int partition = sortedRecords.packedRecordPointer.getPartitionId();
// Records come out of the sorter ordered by partition id.
assert (partition >= currentPartition);
if (partition != currentPartition) {
// Switch to the new partition
if (currentPartition != -1) {
writer.commitAndClose();
spillInfo.partitionLengths[currentPartition] = writer.fileSegment().length();
}
currentPartition = partition;
writer =
blockManager.getDiskWriter(blockId, file, ser, fileBufferSizeBytes, writeMetricsToUse);
}
// Copy the record from its data page to disk, DISK_WRITE_BUFFER_SIZE bytes at a time.
final long recordPointer = sortedRecords.packedRecordPointer.getRecordPointer();
final Object recordPage = memoryManager.getPage(recordPointer);
final long recordOffsetInPage = memoryManager.getOffsetInPage(recordPointer);
int dataRemaining = PlatformDependent.UNSAFE.getInt(recordPage, recordOffsetInPage);
long recordReadPosition = recordOffsetInPage + 4; // skip over record length
while (dataRemaining > 0) {
final int toTransfer = Math.min(DISK_WRITE_BUFFER_SIZE, dataRemaining);
PlatformDependent.copyMemory(
recordPage,
recordReadPosition,
writeBuffer,
PlatformDependent.BYTE_ARRAY_OFFSET,
toTransfer);
writer.write(writeBuffer, 0, toTransfer);
recordReadPosition += toTransfer;
dataRemaining -= toTransfer;
}
writer.recordWritten();
}
// writer is always non-null here (assigned above); the check is kept as a guard.
if (writer != null) {
writer.commitAndClose();
// If `writeSortedFile()` was called from `closeAndGetSpills()` and no records were inserted,
// then the file might be empty. Note that it might be better to avoid calling
// writeSortedFile() in that case.
if (currentPartition != -1) {
spillInfo.partitionLengths[currentPartition] = writer.fileSegment().length();
spills.add(spillInfo);
}
}
if (!isLastFile) {  // i.e. this is a spill file
// The current semantics of `shuffleRecordsWritten` seem to be that it's updated when records
// are written to disk, not when they enter the shuffle sorting code. DiskBlockObjectWriter
// relies on its `recordWritten()` method being called in order to trigger periodic updates to
// `shuffleBytesWritten`. If we were to remove the `recordWritten()` call and increment that
// counter at a higher-level, then the in-progress metrics for records written and bytes
// written would get out of sync.
//
// When writing the last file, we pass `writeMetrics` directly to the DiskBlockObjectWriter;
// in all other cases, we pass in a dummy write metrics to capture metrics, then copy those
// metrics to the true write metrics here. The reason for performing this copying is so that
// we can avoid reporting spilled bytes as shuffle write bytes.
//
// Note that we intentionally ignore the value of `writeMetricsToUse.shuffleWriteTime()`.
// Consistent with ExternalSorter, we do not count this IO towards shuffle write time.
// This means that this IO time is not accounted for anywhere; SPARK-3577 will fix this.
writeMetrics.incShuffleRecordsWritten(writeMetricsToUse.shuffleRecordsWritten());
taskContext.taskMetrics().incDiskBytesSpilled(writeMetricsToUse.shuffleBytesWritten());
}
}
/**
 * Sort and spill the current records in response to memory pressure.
 */
@VisibleForTesting
void spill() throws IOException {
logger.info("Thread {} spilling sort data of {} to disk ({} {} so far)",
Thread.currentThread().getId(),
Utils.bytesToString(getMemoryUsage()),
spills.size(),
spills.size() > 1 ? " times" : " time");
// Write the sorted run, then release the pointer array and all data pages.
writeSortedFile(false);
final long sorterMemoryUsage = sorter.getMemoryUsage();
sorter = null;
shuffleMemoryManager.release(sorterMemoryUsage);
final long spillSize = freeMemory();
taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
// Re-allocate a fresh in-memory sorter for subsequent inserts.
initializeForWriting();
}
// Total bytes held: the pointer array plus all allocated data pages.
private long getMemoryUsage() {
return sorter.getMemoryUsage() + (allocatedPages.size() * (long) PAGE_SIZE);
}
// Frees all data pages (and their shuffle-memory accounting) and resets the
// current-page cursor; returns the number of bytes released.
private long freeMemory() {
long memoryFreed = 0;
for (MemoryBlock block : allocatedPages) {
memoryManager.freePage(block);
shuffleMemoryManager.release(block.size());
memoryFreed += block.size();
}
allocatedPages.clear();
currentPage = null;
currentPagePosition = -1;
freeSpaceInCurrentPage = 0;
return memoryFreed;
}
/**
 * Force all memory and spill files to be deleted; called by shuffle error-handling code.
 */
public void cleanupAfterError() {
freeMemory();
for (SpillInfo spill : spills) {
if (spill.file.exists() && !spill.file.delete()) {
logger.error("Unable to delete spill file {}", spill.file.getPath());
}
}
if (sorter != null) {
shuffleMemoryManager.release(sorter.getMemoryUsage());
sorter = null;
}
}
/**
 * Checks whether there is enough space to insert a new record into the sorter.
 *
 * @param requiredSpace the required space in the data page, in bytes, including space for storing
 *                      the record size.
 * @return true if the record can be inserted without requiring more allocations, false otherwise.
 */
private boolean haveSpaceForRecord(int requiredSpace) {
assert (requiredSpace > 0);
return (sorter.hasSpaceForAnotherRecord() && (requiredSpace <= freeSpaceInCurrentPage));
}
/**
 * Allocates more memory in order to insert an additional record. This will request additional
 * memory from the {@link ShuffleMemoryManager} and spill if the requested memory can not be
 * obtained.
 *
 * @param requiredSpace the required space in the data page, in bytes, including space for storing
 *                      the record size.
 */
private void allocateSpaceForRecord(int requiredSpace) throws IOException {
// Phase 1: make sure the pointer array has room for one more entry.
if (!sorter.hasSpaceForAnotherRecord()) {
logger.debug("Attempting to expand sort pointer array");
final long oldPointerArrayMemoryUsage = sorter.getMemoryUsage();
final long memoryToGrowPointerArray = oldPointerArrayMemoryUsage * 2;
final long memoryAcquired = shuffleMemoryManager.tryToAcquire(memoryToGrowPointerArray);
if (memoryAcquired < memoryToGrowPointerArray) {
// Could not get enough memory to grow the array: spill to free it instead.
shuffleMemoryManager.release(memoryAcquired);
spill();
} else {
sorter.expandPointerArray();
// The old array's accounting can be released now that it has been replaced.
shuffleMemoryManager.release(oldPointerArrayMemoryUsage);
}
}
// Phase 2: make sure the current data page has room for the record bytes.
if (requiredSpace > freeSpaceInCurrentPage) {
// NOTE(review): this trace message reads "is less than", but the branch means
// the required space EXCEEDS the free space — the wording looks inverted.
logger.trace("Required space {} is less than free space in current page ({})", requiredSpace,
freeSpaceInCurrentPage);
// TODO: we should track metrics on the amount of space wasted when we roll over to a new page
// without using the free space at the end of the current page. We should also do this for
// BytesToBytesMap.
if (requiredSpace > PAGE_SIZE) {
throw new IOException("Required space " + requiredSpace + " is greater than page size (" +
PAGE_SIZE + ")");
} else {
final long memoryAcquired = shuffleMemoryManager.tryToAcquire(PAGE_SIZE);
if (memoryAcquired < PAGE_SIZE) {
// Not enough memory for a new page: spill, then retry the acquisition once.
shuffleMemoryManager.release(memoryAcquired);
spill();
final long memoryAcquiredAfterSpilling = shuffleMemoryManager.tryToAcquire(PAGE_SIZE);
if (memoryAcquiredAfterSpilling != PAGE_SIZE) {
shuffleMemoryManager.release(memoryAcquiredAfterSpilling);
throw new IOException("Unable to acquire " + PAGE_SIZE + " bytes of memory");
}
}
currentPage = memoryManager.allocatePage(PAGE_SIZE);
currentPagePosition = currentPage.getBaseOffset();
freeSpaceInCurrentPage = PAGE_SIZE;
allocatedPages.add(currentPage);
}
}
}
/**
 * Write a record to the shuffle sorter.
 */
public void insertRecord(
Object recordBaseObject,
long recordBaseOffset,
int lengthInBytes,
int partitionId) throws IOException {
// Need 4 bytes to store the record length.
final int totalSpaceRequired = lengthInBytes + 4;
if (!haveSpaceForRecord(totalSpaceRequired)) {
allocateSpaceForRecord(totalSpaceRequired);
}
// Encode (page, offset) into a single long so the sorter can locate the record later.
final long recordAddress =
memoryManager.encodePageNumberAndOffset(currentPage, currentPagePosition);
final Object dataPageBaseObject = currentPage.getBaseObject();
// Layout in the page: 4-byte length prefix followed by the record bytes.
PlatformDependent.UNSAFE.putInt(dataPageBaseObject, currentPagePosition, lengthInBytes);
currentPagePosition += 4;
freeSpaceInCurrentPage -= 4;
PlatformDependent.copyMemory(
recordBaseObject,
recordBaseOffset,
dataPageBaseObject,
currentPagePosition,
lengthInBytes);
currentPagePosition += lengthInBytes;
freeSpaceInCurrentPage -= lengthInBytes;
sorter.insertRecord(recordAddress, partitionId);
}
/**
 * Close the sorter, causing any buffered data to be sorted and written out to disk.
 *
 * @return metadata for the spill files written by this sorter. If no records were ever inserted
 *         into this sorter, then this will return an empty array.
 * @throws IOException
 */
public SpillInfo[] closeAndGetSpills() throws IOException {
try {
if (sorter != null) {
// Do not count the final file towards the spill count.
writeSortedFile(true);
freeMemory();
}
return spills.toArray(new SpillInfo[spills.size()]);
} catch (IOException e) {
// Best-effort cleanup of pages and spill files, then rethrow the original error.
cleanupAfterError();
throw e;
}
}
}
|
|
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.PackageMaterialConfig;
import com.thoughtworks.go.config.materials.PluggableSCMMaterialConfig;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig;
import com.thoughtworks.go.config.parts.XmlPartialConfigProvider;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.remote.PartialConfig;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.domain.config.*;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.listener.EntityConfigChangedListener;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.result.DefaultLocalizedOperationResult;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.serverhealth.HealthStateScope;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.CachedDigestUtils;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.SystemEnvironment;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.io.ByteArrayOutputStream;
import java.util.UUID;
import static com.thoughtworks.go.helper.MaterialConfigsMother.git;
import static com.thoughtworks.go.util.TestUtils.contains;
import static java.util.Arrays.asList;
import static junit.framework.Assert.assertTrue;
import static junit.framework.TestCase.assertFalse;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:testPropertyConfigurer.xml",
"classpath:WEB-INF/spring-all-servlet.xml",
})
public class PipelineConfigServiceIntegrationTest {
static {
// Disable compressed javascript before the Spring test context is created.
new SystemEnvironment().setProperty(GoConstants.USE_COMPRESSED_JAVASCRIPT, "false");
}
@Autowired
private PipelineConfigService pipelineConfigService;
@Autowired
private GoConfigService goConfigService;
@Autowired
private GoConfigDao goConfigDao;
@Autowired
private DatabaseAccessHelper dbHelper;
@Autowired
private ConfigRepository configRepository;
@Autowired
private ConfigCache configCache;
@Autowired
private ConfigElementImplementationRegistry registry;
@Autowired
private GoPartialConfig goPartialConfig;
@Autowired
private CachedGoPartials cachedGoPartials;
@Autowired
private ServerHealthService serverHealthService;
private GoConfigFileHelper configHelper;
private PipelineConfig pipelineConfig;
private Username user;
private String headCommitBeforeUpdate;
private HttpLocalizedOperationResult result;
private String groupName = "jumbo";
private ConfigRepoConfig repoConfig1;
private PartialConfig partialConfig;
@Rule
public ExpectedException thrown = ExpectedException.none();
private String remoteDownstreamPipelineName;
private ConfigRepoConfig repoConfig2;
@Before
public void setup() throws Exception {
// Fresh config file, database and listeners for every test.
cachedGoPartials.clear();
configHelper = new GoConfigFileHelper();
dbHelper.onSetUp();
configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
configHelper.onSetUp();
goConfigService.forceNotifyListeners();
user = new Username(new CaseInsensitiveString("current"));
// A local pipeline in group "jumbo" with a git material, plus two config repos.
pipelineConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), git("FOO"));
goConfigService.addPipeline(pipelineConfig, groupName);
repoConfig1 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url"), XmlPartialConfigProvider.providerName);
repoConfig2 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url2"), XmlPartialConfigProvider.providerName);
goConfigService.updateConfig(new UpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.getConfigRepos().add(repoConfig1);
cruiseConfig.getConfigRepos().add(repoConfig2);
return cruiseConfig;
}
});
GoCipher goCipher = new GoCipher();
goConfigService.updateServerConfig(new MailHost(goCipher), false, goConfigService.configFileMd5(), "artifacts", null, null, "0", null, null, "foo");
// Grant the test user system-admin privileges.
UpdateConfigCommand command = goConfigService.modifyAdminPrivilegesCommand(asList(user.getUsername().toString()), new TriStateSelection(Admin.GO_SYSTEM_ADMIN, TriStateSelection.Action.add));
goConfigService.updateConfig(command);
// Remote pipeline in repo1 depends on the local pipeline; repo2 holds an independent pipeline.
remoteDownstreamPipelineName = "remote-downstream";
partialConfig = PartialConfigMother.pipelineWithDependencyMaterial(remoteDownstreamPipelineName, pipelineConfig, new RepoConfigOrigin(repoConfig1, "repo1_r1"));
goPartialConfig.onSuccessPartialConfig(repoConfig1, partialConfig);
PartialConfig partialConfigFromRepo2 = PartialConfigMother.withPipeline("independent-pipeline", new RepoConfigOrigin(repoConfig2, "repo2_r1"));
goPartialConfig.onSuccessPartialConfig(repoConfig2, partialConfigFromRepo2);
result = new HttpLocalizedOperationResult();
// Remember the config repo HEAD so tests can assert whether the config changed.
headCommitBeforeUpdate = configRepository.getCurrentRevCommit().name();
goConfigService.security().securityAuthConfigs().add(new SecurityAuthConfig("file", "cd.go.authentication.passwordfile"));
}
@After
public void tearDown() throws Exception {
    // Every cached partial — both the last valid and the last known set — must
    // have ended the test without validation errors.
    for (PartialConfig validPartial : cachedGoPartials.lastValidPartials()) {
        boolean errorFree = ErrorCollector.getAllErrors(validPartial).isEmpty();
        assertThat(errorFree, is(true));
    }
    for (PartialConfig knownPartial : cachedGoPartials.lastKnownPartials()) {
        boolean errorFree = ErrorCollector.getAllErrors(knownPartial).isEmpty();
        assertThat(errorFree, is(true));
    }
    // Reset shared state in the same order the original fixture used.
    cachedGoPartials.clear();
    configHelper.onTearDown();
    dbHelper.onTearDown();
}
@Test
public void shouldCreatePipelineConfigWhenPipelineGroupExists() throws GitAPIException {
    // Creating a pipeline in an existing group must persist it, commit to the
    // config repo under the acting user, and keep config/configForEdit md5s in sync.
    GoConfigHolder holderBefore = goConfigDao.loadConfigHolder();
    pipelineConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), git("FOO"));

    pipelineConfigService.createPipelineConfig(user, pipelineConfig, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(true));
    assertThat(goConfigDao.loadConfigHolder(), is(not(holderBefore)));
    PipelineConfig persisted = goConfigDao.loadForEditing().getPipelineConfigByName(pipelineConfig.name());
    assertThat(persisted, is(pipelineConfig));
    assertThat(configRepository.getCurrentRevCommit().name(), is(not(headCommitBeforeUpdate)));
    assertThat(configRepository.getCurrentRevision().getUsername(), is(user.getDisplayName()));
    assertThat(configRepository.getCurrentRevision().getMd5(), is(not(holderBefore.config.getMd5())));
    assertThat(configRepository.getCurrentRevision().getMd5(), is(goConfigDao.loadConfigHolder().config.getMd5()));
    assertThat(configRepository.getCurrentRevision().getMd5(), is(goConfigDao.loadConfigHolder().configForEdit.getMd5()));
}
@Test
public void shouldCreatePipelineConfigWhenPipelineGroupDoesNotExist() throws GitAPIException {
    // Creating a pipeline in an unknown group should create the group on the fly.
    GoConfigHolder holderBefore = goConfigDao.loadConfigHolder();
    DependencyMaterialConfig upstreamDependency = new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name());
    PipelineConfig downstream = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), upstreamDependency);

    pipelineConfigService.createPipelineConfig(user, downstream, result, "does-not-exist");

    assertThat(result.toString(), result.isSuccessful(), is(true));
    assertThat(goConfigDao.loadConfigHolder(), is(not(holderBefore)));
    PipelineConfig persisted = goConfigDao.loadForEditing().getPipelineConfigByName(downstream.name());
    assertThat(persisted, is(downstream));
    assertThat(configRepository.getCurrentRevCommit().name(), is(not(headCommitBeforeUpdate)));
    assertThat(configRepository.getCurrentRevision().getUsername(), is(user.getDisplayName()));
}
// A downstream pipeline that depends on a stage which only exists via the
// upstream's template should still validate during creation.
@Test
public void shouldUpdatePipelineConfigWhenDependencyMaterialHasTemplateDefined() throws Exception {
CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
saveTemplateWithParamToConfig(templateName);
// Strip the upstream pipeline's own stages and attach the template instead.
pipelineConfig.clear();
pipelineConfig.setTemplateName(templateName);
pipelineConfig.addParam(new ParamConfig("SOME_PARAM", "SOME_VALUE"));
CruiseConfig cruiseConfig = goConfigDao.loadConfigHolder().configForEdit;
cruiseConfig.update(groupName, pipelineConfig.name().toString(), pipelineConfig);
saveConfig(cruiseConfig);
// Depend on "stage", which is defined only inside the template.
PipelineConfig downstream = GoConfigMother.createPipelineConfigWithMaterialConfig("downstream", new DependencyMaterialConfig(pipelineConfig.name(), new CaseInsensitiveString("stage")));
pipelineConfigService.createPipelineConfig(user, downstream, result, groupName);
assertThat(result.toString(), result.isSuccessful(), is(true));
assertTrue(downstream.materialConfigs().first().errors().isEmpty());
}
// Same as the previous test, but the templated upstream is itself created via
// the create-pipeline flow rather than by editing the config directly.
@Test
public void shouldUpdatePipelineConfigWithDependencyMaterialWhenUpstreamPipelineHasTemplateDefinedANDUpstreamPipelineIsCreatedUsingCreatePipelineFlow() throws Exception {
CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
saveTemplateWithParamToConfig(templateName);
MaterialConfigs materialConfigs = new MaterialConfigs();
materialConfigs.add(new DependencyMaterialConfig(pipelineConfig.name(), new CaseInsensitiveString("stage")));
// Templated upstream, created through the service under test.
PipelineConfig upstream = new PipelineConfig(new CaseInsensitiveString("upstream"), materialConfigs);
upstream.setTemplateName(templateName);
upstream.addParam(new ParamConfig("SOME_PARAM", "SOME_VALUE"));
pipelineConfigService.createPipelineConfig(user, upstream, result, groupName);
// Downstream depends on the template-provided stage of the upstream.
PipelineConfig downstream = GoConfigMother.createPipelineConfigWithMaterialConfig("downstream", new DependencyMaterialConfig(upstream.name(), new CaseInsensitiveString("stage")));
pipelineConfigService.createPipelineConfig(user, downstream, result, groupName);
assertThat(result.toString(), result.isSuccessful(), is(true));
assertTrue(downstream.materialConfigs().first().errors().isEmpty());
}
// A fetch task pointing at a stage/job that exist only through the upstream's
// template must still validate when the downstream pipeline is created.
@Test
public void shouldUpdatePipelineConfigWhenFetchTaskFromUpstreamHasPipelineWithTemplateDefined() throws Exception {
CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
saveTemplateWithParamToConfig(templateName);
// Convert the upstream pipeline to a templated one.
pipelineConfig.clear();
pipelineConfig.setTemplateName(templateName);
pipelineConfig.addParam(new ParamConfig("SOME_PARAM", "SOME_VALUE"));
CaseInsensitiveString stage = new CaseInsensitiveString("stage");
CaseInsensitiveString job = new CaseInsensitiveString("job");
CruiseConfig cruiseConfig = goConfigDao.loadConfigHolder().configForEdit;
cruiseConfig.update(groupName, pipelineConfig.name().toString(), pipelineConfig);
saveConfig(cruiseConfig);
PipelineConfig downstream = GoConfigMother.createPipelineConfigWithMaterialConfig("downstream", new DependencyMaterialConfig(pipelineConfig.name(), stage));
// Fetch from the template-provided stage/job of the upstream.
downstream.getStage(stage).getJobs().first().addTask(new FetchTask(pipelineConfig.name(), stage, job, "src", "dest"));
pipelineConfigService.createPipelineConfig(user, downstream, result, groupName);
assertThat(result.toString(), result.isSuccessful(), is(true));
assertTrue(downstream.materialConfigs().first().errors().isEmpty());
}
@Test
public void shouldNotCreatePipelineConfigWhenAPipelineBySameNameAlreadyExists() throws GitAPIException {
    // Pipeline names are unique case-insensitively: creating a lower-cased
    // duplicate must be rejected (422) and must not touch config or repo.
    GoConfigHolder holderBefore = goConfigDao.loadConfigHolder();
    DependencyMaterialConfig dependency = new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name());
    PipelineConfig duplicate = GoConfigMother.createPipelineConfigWithMaterialConfig(pipelineConfig.name().toLower(), dependency);

    pipelineConfigService.createPipelineConfig(user, duplicate, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertFalse(duplicate.errors().isEmpty());
    String expectedError = String.format("You have defined multiple pipelines named '%s'. Pipeline names must be unique. Source(s): [cruise-config.xml]", pipelineConfig.name());
    assertThat(duplicate.errors().on(PipelineConfig.NAME), is(expectedError));
    assertThat(goConfigDao.loadConfigHolder(), is(holderBefore));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
}
@Test
public void shouldNotCreatePipelineConfigWhenInvalidGroupNameIsPassed() throws GitAPIException {
    // A group name with illegal characters must fail validation (422) and
    // leave both the config and the config repo untouched.
    GoConfigHolder holderBefore = goConfigDao.loadConfigHolder();
    DependencyMaterialConfig dependency = new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name());
    PipelineConfig candidate = GoConfigMother.createPipelineConfigWithMaterialConfig(pipelineConfig.name().toLower(), dependency);

    pipelineConfigService.createPipelineConfig(user, candidate, result, "%$-with-invalid-characters");

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertFalse(candidate.errors().isEmpty());
    assertThat(candidate.errors().on(PipelineConfigs.GROUP), contains("Invalid group name '%$-with-invalid-characters'"));
    assertThat(goConfigDao.loadConfigHolder(), is(holderBefore));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
}
// A fetch task with an absolute dest path escapes the working directory; the
// validation error must be attached to the offending task.
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrors() throws GitAPIException {
ExecTask execTask = new ExecTask("ls", "-al", "#{foo}");
// "/usr/dest" is absolute, i.e. outside the working directory — invalid.
FetchTask fetchTask = new FetchTask(pipelineConfig.name(), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "srcfile", "/usr/dest");
JobConfig job = new JobConfig("default-job");
job.addTask(execTask);
job.addTask(fetchTask);
StageConfig stage = new StageConfig(new CaseInsensitiveString("default-stage"), new JobConfigs(job));
PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
pipeline.addParam(new ParamConfig("foo", "."));
// Bypass add-time validation so the service-level validation is what fails.
pipeline.addStageWithoutValidityAssertion(stage);
pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);
assertThat(result.toString(), result.isSuccessful(), is(false));
assertThat(result.httpCode(), is(422));
String expectedError = String.format("Task of job 'default-job' in stage 'default-stage' of pipeline '%s' has dest path '/usr/dest' which is outside the working directory.", pipeline.name());
assertThat(fetchTask.errors().on("dest"), is(expectedError));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnEnvironmentVariables() throws GitAPIException {
    // Pipeline, stage and job each get an environment variable with an empty
    // name (invalid). Creation must fail with 422 and the error must be mapped
    // back onto each offending variable.
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    pipeline.addEnvironmentVariable("", "PipelineEnvVar");
    EnvironmentVariablesConfig stageVariables = new EnvironmentVariablesConfig();
    EnvironmentVariableConfig stageVar = new EnvironmentVariableConfig("", "StageEnvVar");
    stageVariables.add(stageVar);
    EnvironmentVariablesConfig jobVariables = new EnvironmentVariablesConfig();
    EnvironmentVariableConfig jobVar = new EnvironmentVariableConfig("", "JobEnvVar");
    jobVariables.add(jobVar);
    StageConfig stageConfig = pipeline.get(0);
    stageConfig.setVariables(stageVariables);
    JobConfig jobConfig = stageConfig.getJobs().get(0);
    jobConfig.setVariables(jobVariables);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: the original concatenated pipeline.name() into a format string with
    // no specifiers and passed pipeline.name() as an ignored String.format
    // argument. Use a real %s placeholder / plain literals; resulting strings
    // are unchanged.
    assertThat(pipeline.getVariables().get(0).errors().firstError(), is(String.format("Environment Variable cannot have an empty name for pipeline '%s'.", pipeline.name())));
    assertThat(stageVar.errors().firstError(), is("Environment Variable cannot have an empty name for stage 'stage'."));
    assertThat(jobVar.errors().firstError(), is("Environment Variable cannot have an empty name for job 'job'."));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnParameters() throws GitAPIException {
    // A parameter with an empty name is invalid; creation must fail with 422
    // and the error must land on the parameter itself.
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    ParamConfig param = new ParamConfig("", "Foo");
    pipeline.addParam(param);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: replaced concatenation + an ignored String.format argument with a
    // proper %s placeholder; the resulting string is unchanged.
    assertThat(param.errors().firstError(), is(String.format("Parameter cannot have an empty name for pipeline '%s'.", pipeline.name())));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnTrackingTool() throws GitAPIException {
    // An empty tracking tool (no regex) is invalid; creation must fail with 422.
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    TrackingTool trackingTool = new TrackingTool();
    pipeline.setTrackingTool(trackingTool);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: dropped a String.format whose format string had no specifiers and
    // whose pipeline.name() argument was silently ignored.
    assertThat(trackingTool.errors().firstError(), is("Regex should be populated"));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnArtifactPlans() throws GitAPIException {
    // A build artifact with an empty source is invalid; creation must fail with
    // 422 and the error must be attached to the artifact config.
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    JobConfig jobConfig = pipeline.get(0).getJobs().get(0);
    ArtifactConfigs artifactConfigs = new ArtifactConfigs();
    BuildArtifactConfig buildArtifactConfig = new BuildArtifactConfig("", "/foo");
    artifactConfigs.add(buildArtifactConfig);
    jobConfig.setArtifactConfigs(artifactConfigs);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: dropped a String.format whose format string had no specifiers and
    // whose pipeline.name() argument was silently ignored.
    assertThat(buildArtifactConfig.errors().firstError(), is("Job 'job' has an artifact with an empty source"));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnTimer() throws GitAPIException {
    // A timer with a null spec is invalid; the error must land on the timer itself.
    DependencyMaterialConfig dependency = new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name());
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), dependency);
    TimerConfig timer = new TimerConfig(null, true);
    pipeline.setTimer(timer);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertThat(timer.errors().firstError(), is("Timer Spec can not be null."));
}
@Test
public void shouldShowPipelineConfigErrorMessageWhenPipelineConfigHasApprovalRelatedErrors() {
    // An approval referencing a role that does not exist must fail validation,
    // with the error attached to the approval's auth config.
    DependencyMaterialConfig dependency = new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name());
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), dependency);
    StageConfig firstStage = pipeline.get(0);
    AdminRole missingRole = new AdminRole(new CaseInsensitiveString("non-existent-role"));
    firstStage.setApproval(new Approval(new AuthConfig(missingRole)));

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertThat(firstStage.getApproval().getAuthConfig().errors().firstError(), is("Role \"non-existent-role\" does not exist."));
}
@Test
public void shouldShowPipelineConfigErrorMessageWhenPipelineConfigHasApprovalTypeErrors() {
    // Approval type must be 'manual' or 'success'; anything else is rejected.
    DependencyMaterialConfig dependency = new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name());
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), dependency);
    StageConfig firstStage = pipeline.get(0);
    Approval invalidApproval = new Approval();
    invalidApproval.setType("not-success-or-manual");
    firstStage.setApproval(invalidApproval);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertThat(firstStage.getApproval().errors().firstError(), is("You have defined approval type as 'not-success-or-manual'. Approval can only be of the type 'manual' or 'success'."));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnProperties() throws GitAPIException {
    // An artifact property with no name is invalid; creation must fail with 422
    // and the error must be attached to the property generator.
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    JobConfig jobConfig = pipeline.get(0).getJobs().get(0);
    ArtifactPropertiesConfig properties = new ArtifactPropertiesConfig();
    ArtifactPropertyConfig artifactPropertiesGenerator = new ArtifactPropertyConfig();
    properties.add(artifactPropertiesGenerator);
    jobConfig.setProperties(properties);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: dropped a String.format whose format string had no specifiers and
    // whose pipeline.name() argument was silently ignored.
    assertThat(artifactPropertiesGenerator.errors().firstError(), is("Invalid property name 'null'. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters."));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedHasErrorsOnTabs() throws GitAPIException {
    // A job tab with an empty name is invalid; creation must fail with 422 and
    // the error must be attached to the tab.
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    JobConfig jobConfig = pipeline.get(0).getJobs().get(0);
    jobConfig.addTab("", "/foo");

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: dropped a String.format whose format string had no specifiers and
    // whose pipeline.name() argument was silently ignored.
    assertThat(jobConfig.getTabs().first().errors().firstError(), is("Tab name '' is invalid. This must be alphanumeric and can contain underscores and periods."));
}
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingCreatedFromTemplateHasErrors() throws GitAPIException {
    // A templatized pipeline whose dependency material points at a nonexistent
    // upstream must fail validation, with the error on the material.
    JobConfigs jobConfigs = new JobConfigs();
    jobConfigs.add(new JobConfig(new CaseInsensitiveString("Job")));
    StageConfig stage = new StageConfig(new CaseInsensitiveString("Stage-1"), jobConfigs);
    final PipelineTemplateConfig templateConfig = new PipelineTemplateConfig(new CaseInsensitiveString("foo"), stage);
    goConfigDao.updateConfig(new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            cruiseConfig.addTemplate(templateConfig);
            return cruiseConfig;
        }
    });
    PipelineConfig pipeline = GoConfigMother.createPipelineConfigWithMaterialConfig();
    pipeline.templatize(templateConfig.name());
    DependencyMaterialConfig material = new DependencyMaterialConfig(new CaseInsensitiveString("Invalid-pipeline"), new CaseInsensitiveString("Stage"));
    pipeline.addMaterialConfig(material);

    pipelineConfigService.createPipelineConfig(user, pipeline, result, groupName);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    // Fixed: dropped a String.format whose format string had no specifiers and
    // whose pipeline.name() argument was silently ignored.
    assertThat(material.errors().firstError(), is("Pipeline with name 'Invalid-pipeline' does not exist, it is defined as a dependency for pipeline 'pipeline' (cruise-config.xml)"));
}
// Same fetch-task dest validation as the create-flow test, but through the
// update flow: the md5 is captured before mutating the pipeline so the update
// is accepted for processing, then fails validation.
@Test
public void shouldShowThePipelineConfigErrorMessageWhenPipelineBeingUpdatedHasErrors() throws GitAPIException {
ExecTask execTask = new ExecTask("ls", "-al", "#{foo}");
// "/usr/dest" is absolute, i.e. outside the working directory — invalid.
FetchTask fetchTask = new FetchTask(pipelineConfig.name(), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "srcfile", "/usr/dest");
JobConfig job = new JobConfig("default-job");
job.addTask(execTask);
job.addTask(fetchTask);
StageConfig stage = new StageConfig(new CaseInsensitiveString("default-stage"), new JobConfigs(job));
// md5 of the pipeline as currently stored, computed before local edits.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.add(stage);
pipelineConfig.addParam(new ParamConfig("foo", "."));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.toString(), result.isSuccessful(), is(false));
assertThat(result.httpCode(), is(422));
String expectedError = String.format("Task of job 'default-job' in stage 'default-stage' of pipeline '%s' has dest path '/usr/dest' which is outside the working directory.", pipelineConfig.name());
assertThat(fetchTask.errors().on("dest"), is(expectedError));
}
// Happy-path update: adding a stage must persist, commit to the config repo
// under the acting user, and change the loaded config holder.
@Test
public void shouldUpdatePipelineConfig() throws GitAPIException {
GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
// md5 of the pipeline as currently stored, computed before the local edit.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.add(new StageConfig(new CaseInsensitiveString("additional_stage"), new JobConfigs(new JobConfig(new CaseInsensitiveString("addtn_job")))));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.toString(), result.isSuccessful(), is(true));
assertThat(goConfigDao.loadConfigHolder(), is(not(goConfigHolderBeforeUpdate)));
// The new stage and its job must be present in the persisted config.
StageConfig newlyAddedStage = goConfigDao.loadForEditing().getPipelineConfigByName(pipelineConfig.name()).getStage(new CaseInsensitiveString("additional_stage"));
assertThat(newlyAddedStage, is(not(nullValue())));
assertThat(newlyAddedStage.getJobs().isEmpty(), is(false));
assertThat(newlyAddedStage.getJobs().first().name().toString(), is("addtn_job"));
assertThat(configRepository.getCurrentRevCommit().name(), is(not(headCommitBeforeUpdate)));
assertThat(configRepository.getCurrentRevision().getUsername(), is(user.getDisplayName()));
}
@Test
public void shouldNotUpdatePipelineConfigInCaseOfValidationErrors() throws GitAPIException {
    // An invalid label template must fail validation (422) and leave both the
    // persisted config and the config repo untouched.
    GoConfigHolder holderBefore = goConfigDao.loadConfigHolder();
    String xmlBefore = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
    String md5 = CachedDigestUtils.md5Hex(xmlBefore);
    pipelineConfig.setLabelTemplate("LABEL");

    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertThat(pipelineConfig.errors().on(PipelineConfig.LABEL_TEMPLATE), contains("Invalid label"));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(holderBefore.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(holderBefore.config));
}
// Attaching a template whose parameter is never supplied makes config
// preprocessing fail; the update must be rejected and nothing persisted.
@Test
public void shouldNotUpdatePipelineWhenPreprocessingFails() throws Exception {
CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
saveTemplateWithParamToConfig(templateName);
GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
// md5 of the pipeline as currently stored, computed before the local edit.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
// Attach the template but deliberately omit the SOME_PARAM it requires.
pipelineConfig.clear();
pipelineConfig.setTemplateName(templateName);
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.toString(), result.isSuccessful(), is(false));
assertThat(result.toString(), result.toString().contains("Parameter 'SOME_PARAM' is not defined"), is(true));
assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
// A pipeline cannot both reference a template and define stages locally; the
// update must be rejected with errors on both the "stages" and "template" keys.
@Test
public void shouldNotUpdatePipelineWhenPipelineIsAssociatedWithTemplateAsWellAsHasStagesDefinedLocally() throws Exception {
CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
saveTemplateWithParamToConfig(templateName);
GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
// md5 of the pipeline as currently stored, computed before the local edits.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
// Attach a template AND a local stage (bypassing add-time validation) to
// trigger the mutually-exclusive-configuration error.
pipelineConfig.clear();
pipelineConfig.setTemplateName(templateName);
pipelineConfig.addStageWithoutValidityAssertion(StageConfigMother.stageConfig("local-stage"));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.toString(), result.isSuccessful(), is(false));
assertThat(pipelineConfig.errors().on("stages"), is(String.format("Cannot add stages to pipeline '%s' which already references template '%s'", pipelineConfig.name(), templateName)));
assertThat(pipelineConfig.errors().on("template"), is(String.format("Cannot set template '%s' on pipeline '%s' because it already has stages defined", templateName, pipelineConfig.name())));
assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
@Test
public void shouldCheckForUserPermissionBeforeUpdatingPipelineConfig() throws Exception {
    // An unauthorized user must receive a 403 and the config must not change.
    CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
    saveTemplateWithParamToConfig(templateName);
    GoConfigHolder holderBefore = goConfigDao.loadConfigHolder();
    Username unauthorizedUser = new Username(new CaseInsensitiveString("unauthorized_user"));

    pipelineConfigService.updatePipelineConfig(unauthorizedUser, pipelineConfig, null, result);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.toString(), result.httpCode(), is(403));
    String expectedMessage = "Unauthorized to edit '" + pipelineConfig.name() + "' pipeline.";
    assertThat(result.toString(), result.message().equals(expectedMessage), is(true));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(holderBefore.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(holderBefore.config));
}
// Validation errors raised for a pluggable SCM material (missing destination,
// unknown plugin) must be mapped back onto the material object the caller passed in.
@Test
public void shouldMapErrorsBackToScmMaterials() throws Exception {
GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
String scmid = "scmid";
saveScmMaterialToConfig(scmid);
PluggableSCMMaterialConfig scmMaterialConfig = new PluggableSCMMaterialConfig(scmid);
// md5 of the pipeline as currently stored, computed before the local edit.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.materialConfigs().add(scmMaterialConfig);
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.toString(), result.isSuccessful(), is(false));
// With multiple SCM materials, each needs a destination folder; and no plugin
// is registered for this scm-id in the test context.
assertThat(scmMaterialConfig.errors().on(PluggableSCMMaterialConfig.FOLDER), is("Destination directory is required when specifying multiple scm materials"));
assertThat(scmMaterialConfig.errors().on(PluggableSCMMaterialConfig.SCM_ID), is("Could not find plugin for scm-id: [scmid]."));
assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
// Validation errors raised for a package material (unknown repository for the
// package id) must be mapped back onto the material object the caller passed in.
@Test
public void shouldMapErrorsBackToPackageMaterials() throws Exception {
GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
String packageid = "packageid";
saveScmMaterialToConfig(packageid);
PackageMaterialConfig packageMaterialConfig = new PackageMaterialConfig(packageid);
// md5 of the pipeline as currently stored, computed before the local edit.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.materialConfigs().add(packageMaterialConfig);
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.toString(), result.isSuccessful(), is(false));
assertThat(result.message(), is(String.format("Validations failed for pipeline '%s'. Error(s): [Validation failed.]. Please correct and resubmit.", pipelineConfig.name())));
assertThat(packageMaterialConfig.errors().on(PackageMaterialConfig.PACKAGE_ID), is("Could not find repository for given package id:[packageid]"));
assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
@Test
public void shouldDeletePipelineConfig() throws Exception {
    // A pipeline with no dependents and no environment membership can be deleted.
    PipelineConfig pipeline = PipelineConfigMother.createPipelineConfigWithStages(UUID.randomUUID().toString(), "stage");
    goConfigService.addPipeline(pipeline, "default");
    assertTrue(goConfigService.hasPipelineNamed(pipeline.name()));
    int countBefore = goConfigService.getAllPipelineConfigs().size();

    pipelineConfigService.deletePipelineConfig(user, pipeline, result);

    assertTrue(result.isSuccessful());
    int countAfter = goConfigService.getAllPipelineConfigs().size();
    assertThat(countBefore - countAfter, is(1));
    assertFalse(goConfigService.hasPipelineNamed(pipeline.name()));
}
@Test
public void shouldNotDeleteThePipelineForUnauthorizedUsers() throws Exception {
    // With security enabled, a non-admin user must get a 403 and the pipeline
    // must survive the delete attempt.
    goConfigService.security().securityAuthConfigs().add(new SecurityAuthConfig("file", "cd.go.authentication.passwordfile"));
    int countBefore = goConfigService.getAllPipelineConfigs().size();
    assertTrue(goConfigService.hasPipelineNamed(pipelineConfig.name()));
    CaseInsensitiveString userName = new CaseInsensitiveString("unauthorized-user");

    pipelineConfigService.deletePipelineConfig(new Username(userName), pipelineConfig, result);

    assertFalse(result.isSuccessful());
    assertThat(result.message(), is(EntityType.Pipeline.forbiddenToDelete(pipelineConfig.name(), userName)));
    assertThat(result.httpCode(), is(403));
    int countAfter = goConfigService.getAllPipelineConfigs().size();
    assertThat(countAfter, is(countBefore));
    assertTrue(goConfigService.hasPipelineNamed(pipelineConfig.name()));
}
// A pipeline referenced by an environment cannot be deleted; the attempt must
// fail with 422 and leave the pipeline count unchanged.
@Test
public void shouldNotDeletePipelineConfigWhenItIsUsedInAnEnvironment() throws Exception {
BasicEnvironmentConfig env = new BasicEnvironmentConfig(new CaseInsensitiveString("Dev"));
PipelineConfig pipeline = PipelineConfigMother.createPipelineConfigWithStages(UUID.randomUUID().toString(), "stage");
goConfigService.addPipeline(pipeline, "default");
// Make the environment reference the pipeline — this is what blocks deletion.
env.addPipeline(pipeline.name());
goConfigService.addEnvironment(env);
int pipelineCountBefore = goConfigService.getAllPipelineConfigs().size();
assertTrue(goConfigService.hasPipelineNamed(pipeline.name()));
pipelineConfigService.deletePipelineConfig(user, pipeline, result);
assertFalse(result.isSuccessful());
assertThat(result.message(), is("Cannot delete pipeline '" + pipeline.name() + "' as it is present in environment '" + env.name() + "'."));
assertThat(result.httpCode(), is(422));
int pipelineCountAfter = goConfigService.getAllPipelineConfigs().size();
assertThat(pipelineCountAfter, is(pipelineCountBefore));
assertTrue(goConfigService.hasPipelineNamed(pipeline.name()));
}
// A pipeline that another pipeline depends on (via a dependency material)
// cannot be deleted; the attempt must fail with 422 and leave the count unchanged.
@Test
public void shouldNotDeletePipelineConfigWhenItHasDownstreamDependencies() throws Exception {
// Downstream pipeline whose material depends on the pipeline under test.
PipelineConfig dependency = GoConfigMother.createPipelineConfigWithMaterialConfig(new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
goConfigService.addPipeline(dependency, groupName);
int pipelineCountBefore = goConfigService.getAllPipelineConfigs().size();
assertTrue(goConfigService.hasPipelineNamed(pipelineConfig.name()));
pipelineConfigService.deletePipelineConfig(user, pipelineConfig, result);
assertFalse(result.isSuccessful());
assertThat(result.message(), is("Cannot delete pipeline '" + pipelineConfig.name() + "' as pipeline '" + dependency.name() + " (" + dependency.getOriginDisplayName() + ")' depends on it"));
assertThat(result.httpCode(), is(422));
int pipelineCountAfter = goConfigService.getAllPipelineConfigs().size();
assertThat(pipelineCountAfter, is(pipelineCountBefore));
assertTrue(goConfigService.hasPipelineNamed(pipelineConfig.name()));
}
@Test
public void shouldNotifyListenersWithPreprocessedConfigUponSuccessfulUpdate() {
    final String pipelineName = UUID.randomUUID().toString();
    final String templateName = UUID.randomUUID().toString();
    // Single-element array: an effectively-final holder the anonymous listener can mutate.
    final boolean[] wasNotified = {false};
    setupPipelineWithTemplate(pipelineName, templateName);

    PipelineConfig persistedPipeline = goConfigService.pipelineConfigNamed(new CaseInsensitiveString(pipelineName));
    String md5 = CachedDigestUtils.md5Hex(new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(persistedPipeline));

    goConfigService.register(new EntityConfigChangedListener<PipelineConfig>() {
        @Override
        public void onConfigChange(CruiseConfig newCruiseConfig) {
            // Full-config notifications are irrelevant to this test.
        }

        @Override
        public void onEntityConfigChange(PipelineConfig pipelineConfig) {
            wasNotified[0] = true;
            // The listener must see the pre-processed pipeline: its first stage
            // is the one expanded from the template.
            assertThat(pipelineConfig.first(), is(goConfigService.cruiseConfig().getTemplateByName(new CaseInsensitiveString(templateName)).first()));
        }
    });

    // Act: update the templated pipeline; a successful save must fire the listener.
    PipelineConfig updatedPipeline = PipelineConfigMother.pipelineConfigWithTemplate(pipelineName, templateName);
    updatedPipeline.setVariables(new EnvironmentVariablesConfig());
    pipelineConfigService.updatePipelineConfig(user, updatedPipeline, md5, new DefaultLocalizedOperationResult());

    assertThat(wasNotified[0], is(true));
}
@Test
public void shouldNotifyListenersWithPreprocessedConfigUponSuccessfulCreate() {
    final String pipelineName = UUID.randomUUID().toString();
    final String templateName = UUID.randomUUID().toString();
    // Single-element array: an effectively-final holder the anonymous listener can mutate.
    final boolean[] wasNotified = {false};
    // Seeds the template (plus an existing pipeline that uses it) into the config.
    setupPipelineWithTemplate(pipelineName, templateName);

    goConfigService.register(new EntityConfigChangedListener<PipelineConfig>() {
        @Override
        public void onConfigChange(CruiseConfig newCruiseConfig) {
            // Full-config notifications are irrelevant to this test.
        }

        @Override
        public void onEntityConfigChange(PipelineConfig pipelineConfig) {
            wasNotified[0] = true;
            // The listener must see the pre-processed pipeline: its first stage
            // is the one expanded from the template.
            assertThat(pipelineConfig.first(), is(goConfigService.cruiseConfig().getTemplateByName(new CaseInsensitiveString(templateName)).first()));
        }
    });

    // Act: create a brand-new pipeline (fresh random name) off the same template.
    PipelineConfig newPipeline = PipelineConfigMother.pipelineConfigWithTemplate(UUID.randomUUID().toString(), templateName);
    newPipeline.setVariables(new EnvironmentVariablesConfig());
    pipelineConfigService.createPipelineConfig(user, newPipeline, new DefaultLocalizedOperationResult(), "group1");

    assertThat(wasNotified[0], is(true));
}
/**
 * Adds a template named {@code templateName} and a pipeline named
 * {@code pipelineName} (in group "group") that uses it.
 */
private void setupPipelineWithTemplate(final String pipelineName, final String templateName) {
    goConfigService.updateConfig(new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            PipelineTemplateConfig templateConfig = PipelineTemplateConfigMother.createTemplate(templateName);
            cruiseConfig.addTemplate(templateConfig);
            cruiseConfig.addPipeline("group", PipelineConfigMother.pipelineConfigWithTemplate(pipelineName, templateConfig.name().toString()));
            return cruiseConfig;
        }
    });
}
@Test
public void shouldValidateMergedConfigForConfigChanges() throws Exception {
    // Precondition: the remote downstream pipeline (from a config repo) is in the merged config.
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(new CaseInsensitiveString(remoteDownstreamPipelineName)), is(true));

    String md5 = CachedDigestUtils.md5Hex(new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig));
    // Renaming the stage breaks the remote pipeline's dependency material on stage 'stage'.
    pipelineConfig.getFirstStageConfig().setName(new CaseInsensitiveString("upstream_stage_renamed"));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);

    // The save is validated against the merged config, so it must fail.
    assertThat(result.isSuccessful(), is(false));
    assertThat(pipelineConfig.errors().on("base"), is(String.format("Stage with name 'stage' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r1)", pipelineConfig.name())));
    assertThat(result.message(), is(String.format("Validations failed for pipeline '%s'. Error(s): [Validation failed.]. Please correct and resubmit.", pipelineConfig.name())));
}
@Test
public void shouldFallbackToValidPartialsForConfigChanges() throws Exception {
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(new CaseInsensitiveString(remoteDownstreamPipelineName)), is(true));

    // Push an invalid partial from repo1; it must not enter the merged config.
    String invalidPipelineName = "remote_invalid_pipeline";
    goPartialConfig.onSuccessPartialConfig(repoConfig1, PartialConfigMother.invalidPartial(invalidPipelineName, new RepoConfigOrigin(repoConfig1, "repo1_r2")));
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(new CaseInsensitiveString(invalidPipelineName)), is(false));
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(new CaseInsensitiveString(remoteDownstreamPipelineName)), is(true));

    String md5 = CachedDigestUtils.md5Hex(new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig));
    // A harmless local change: append an exec task to the first job.
    pipelineConfig.getFirstStageConfig().getJobs().first().addTask(new ExecTask("executable", new Arguments(new Argument("foo")), "working"));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);

    // The save succeeds against the last *valid* partials; the invalid one stays out.
    assertThat(result.isSuccessful(), is(true));
    CruiseConfig mergedConfig = goConfigService.getCurrentConfig();
    assertThat(mergedConfig.getAllPipelineNames().contains(new CaseInsensitiveString(remoteDownstreamPipelineName)), is(true));
    assertThat(mergedConfig.getAllPipelineNames().contains(new CaseInsensitiveString(invalidPipelineName)), is(false));
}
@Test
public void shouldSaveWhenKnownPartialListIsTheSameAsValidPartialsAndValidationPassesForConfigChanges() throws Exception {
    PipelineConfig remoteDownstream = partialConfig.getGroups().first().getPipelines().get(0);
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstream.name()), is(true));

    // Both config repos start with a clean bill of health.
    HealthStateScope repo1Scope = HealthStateScope.forPartialConfigRepo(repoConfig1);
    HealthStateScope repo2Scope = HealthStateScope.forPartialConfigRepo(repoConfig2);
    assertThat(serverHealthService.filterByScope(repo1Scope).isEmpty(), is(true));
    assertThat(serverHealthService.filterByScope(repo2Scope).isEmpty(), is(true));

    // The remote pipeline depends on stage 'stage' of pipelineConfig.
    DependencyMaterialConfig remoteDependency = goConfigService.getCurrentConfig().getPipelineConfigByName(remoteDownstream.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name());
    assertThat(remoteDependency.getStageName(), is(new CaseInsensitiveString("stage")));

    // A benign local edit (new environment variable) saves cleanly.
    String md5 = CachedDigestUtils.md5Hex(new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig));
    pipelineConfig.setVariables(new EnvironmentVariablesConfig(asList(new EnvironmentVariableConfig("key", "value"))));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);

    assertThat(result.isSuccessful(), is(true));
    CruiseConfig mergedConfig = goConfigService.getCurrentConfig();
    assertThat(mergedConfig.getAllPipelineNames().contains(remoteDownstream.name()), is(true));
    assertThat(mergedConfig.getPipelineConfigByName(pipelineConfig.name()).getVariables().contains(new EnvironmentVariableConfig("key", "value")), is(true));
    assertThat(serverHealthService.filterByScope(repo1Scope).isEmpty(), is(true));
    assertThat(serverHealthService.filterByScope(repo2Scope).isEmpty(), is(true));
}
@Test
public void shouldNotSaveWhenKnownPartialsListIsTheSameAsValidPartialsAndPipelineValidationFailsForConfigChanges() throws Exception {
    PipelineConfig remoteDownstream = partialConfig.getGroups().first().getPipelines().get(0);
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstream.name()), is(true));

    // Both config repos start with a clean bill of health.
    HealthStateScope repo1Scope = HealthStateScope.forPartialConfigRepo(repoConfig1);
    HealthStateScope repo2Scope = HealthStateScope.forPartialConfigRepo(repoConfig2);
    assertThat(serverHealthService.filterByScope(repo1Scope).isEmpty(), is(true));
    assertThat(serverHealthService.filterByScope(repo2Scope).isEmpty(), is(true));

    // The remote pipeline depends on stage 'stage' of pipelineConfig.
    DependencyMaterialConfig remoteDependency = goConfigService.getCurrentConfig().getPipelineConfigByName(remoteDownstream.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name());
    assertThat(remoteDependency.getStageName(), is(new CaseInsensitiveString("stage")));

    // Renaming the stage invalidates the remote pipeline's dependency, so the save must fail.
    String md5 = CachedDigestUtils.md5Hex(new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig));
    pipelineConfig.getFirstStageConfig().setName(new CaseInsensitiveString("new_name"));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);

    assertThat(result.isSuccessful(), is(false));
    assertThat(result.message(), is(String.format("Validations failed for pipeline '%s'. Error(s): [Validation failed.]. Please correct and resubmit.", pipelineConfig.name())));

    // Nothing changed: partials caches stay in sync and the stage keeps its old name everywhere.
    CruiseConfig mergedConfig = goConfigService.getCurrentConfig();
    assertThat(mergedConfig.getAllPipelineNames().contains(remoteDownstream.name()), is(true));
    assertThat(cachedGoPartials.lastValidPartials().contains(partialConfig), is(true));
    assertThat(cachedGoPartials.lastKnownPartials().contains(partialConfig), is(true));
    assertThat(cachedGoPartials.lastKnownPartials().equals(cachedGoPartials.lastValidPartials()), is(true));
    assertThat(mergedConfig.getPipelineConfigByName(remoteDownstream.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name()).getStageName(), is(new CaseInsensitiveString("stage")));
    assertThat(mergedConfig.getPipelineConfigByName(pipelineConfig.name()).getFirstStageConfig().name(), is(new CaseInsensitiveString("stage")));
    assertThat(serverHealthService.filterByScope(repo1Scope).isEmpty(), is(true));
    assertThat(serverHealthService.filterByScope(repo2Scope).isEmpty(), is(true));
}
// Scenario: repo1 pushes an invalid partial (repo1_r2), so the "known" partials
// diverge from the "valid" ones (repo1_r1). A benign local pipeline edit must
// still save — merged against the last VALID partials — while the invalid known
// partial remains flagged in server health.
@Test
public void shouldSaveWhenKnownNotEqualsValidPartialsAndPipelineValidationPassesWhenValidPartialsAreMergedToMain() throws Exception {
PipelineConfig remoteDownstreamPipeline = partialConfig.getGroups().first().getPipelines().get(0);
assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).isEmpty(), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
// Replace repo1's partial with an invalid one at a newer revision (repo1_r2).
partialConfig = PartialConfigMother.invalidPartial(remoteDownstreamPipeline.name().toString(), new RepoConfigOrigin(repoConfig1, "repo1_r2"));
goPartialConfig.onSuccessPartialConfig(repoConfig1, partialConfig);
CruiseConfig currentConfig = goConfigService.getCurrentConfig();
// The merged config still reflects the last valid revision (repo1_r1)...
assertThat(currentConfig.getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
// ...while the cache "knows" the newer, invalid revision.
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
// The invalid known partial is reported against repo1 only.
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is("Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n- For Config Repo: url at repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
// Make a benign local edit (add an environment variable) and save.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.setVariables(new EnvironmentVariablesConfig(asList(new EnvironmentVariableConfig("key", "value"))));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.isSuccessful(), is(true));
// The save succeeded against the valid partials; the known/valid divergence
// and the repo1 health warning are unchanged.
currentConfig = goConfigService.getCurrentConfig();
assertThat(currentConfig.getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(currentConfig.getPipelineConfigByName(pipelineConfig.name()).getVariables().contains(new EnvironmentVariableConfig("key", "value")), is(true));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is("Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n- For Config Repo: url at repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
}
// Scenario: repo1's newer (known) partial expects stage 'upstream_stage_renamed'
// while the last valid partial expects 'stage'. A local edit that renames the
// stage fails against the valid partials but passes against the known ones —
// the save must succeed and promote the known partials to valid.
@Test
public void shouldSaveWhenKnownNotEqualsValidPartialsAndPipelineValidationFailsWithValidPartialsButPassesWhenKnownPartialsAreMergedToMain() throws Exception {
PipelineConfig remoteDownstreamPipeline = partialConfig.getGroups().first().getPipelines().get(0);
assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).isEmpty(), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
// New revision of repo1's partial depends on the not-yet-existing renamed stage.
final CaseInsensitiveString upstreamStageRenamed = new CaseInsensitiveString("upstream_stage_renamed");
partialConfig = PartialConfigMother.pipelineWithDependencyMaterial("remote-downstream", new PipelineConfig(pipelineConfig.name(), pipelineConfig.materialConfigs(), new StageConfig(upstreamStageRenamed, new JobConfigs())), new RepoConfigOrigin(repoConfig1, "repo1_r2"));
goPartialConfig.onSuccessPartialConfig(repoConfig1, partialConfig);
CruiseConfig currentConfig = goConfigService.getCurrentConfig();
DependencyMaterialConfig dependencyMaterialForRemotePipelineInConfigCache = currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name());
// Merged config still uses the valid revision (repo1_r1, depending on 'stage');
// the known cache holds repo1_r2, which is flagged as an invalid merge.
assertThat(dependencyMaterialForRemotePipelineInConfigCache.getStageName(), is(new CaseInsensitiveString("stage")));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is(String.format("Number of errors: 1+\n1. Stage with name 'upstream_stage_renamed' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r2);; \n- For Config Repo: url at repo1_r2", pipelineConfig.name())));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
// Rename the stage locally — exactly what the known partial expects.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.getFirstStageConfig().setName(new CaseInsensitiveString("upstream_stage_renamed"));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.isSuccessful(), is(true));
// The known partials were merged successfully: valid == known at repo1_r2,
// both the local stage and the remote dependency carry the new name, and
// the repo1 health warning is cleared.
currentConfig = goConfigService.getCurrentConfig();
assertThat(currentConfig.getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(cachedGoPartials.lastValidPartials().contains(partialConfig), is(true));
assertThat(cachedGoPartials.lastKnownPartials().contains(partialConfig), is(true));
assertThat(cachedGoPartials.lastKnownPartials().equals(cachedGoPartials.lastValidPartials()), is(true));
assertThat(currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name()).getStageName(), is(upstreamStageRenamed));
assertThat(currentConfig.getPipelineConfigByName(pipelineConfig.name()).getFirstStageConfig().name(), is(upstreamStageRenamed));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).isEmpty(), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
}
// Scenario: two config repos. repo2's known partial is invalid in a way unrelated
// to the edited pipeline; repo1's known partial requires the local stage rename.
// The save fails against the valid partials (stage mismatch) and the fallback to
// known partials must run FULL validation — which trips on repo2's unrelated
// invalid stage — so the save is rejected and nothing changes.
@Test
public void shouldPerformFullValidationNotJustEntitySpecificIfMergingKnownPartialsAsOtherAspectsOfAKnownPartialMightBeInvalid() throws Exception {
PipelineConfig remoteDownstreamPipeline = partialConfig.getGroups().first().getPipelines().get(0);
assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
String independentRemotePipeline = "independent-pipeline";
assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(new CaseInsensitiveString(independentRemotePipeline)), is(true));
//introduce an invalid change in the independent partial
PartialConfig invalidIndependentPartial = PartialConfigMother.invalidPartial(independentRemotePipeline, new RepoConfigOrigin(repoConfig2, "repo2_r2"));
goPartialConfig.onSuccessPartialConfig(repoConfig2, invalidIndependentPartial);
// repo2: valid stays at r1, known advances to the invalid r2; health flags repo2.
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig2.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo2_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig2.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo2_r2"));
assertThat(((RepoConfigOrigin) goConfigService.getCurrentConfig().getPipelineConfigByName(new CaseInsensitiveString(independentRemotePipeline)).getOrigin()).getRevision(), is("repo2_r1"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).isEmpty(), is(true));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).get(0).getDescription(), is("Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n- For Config Repo: url2 at repo2_r2"));
// repo1's newer partial depends on the not-yet-existing renamed stage.
final CaseInsensitiveString upstreamStageRenamed = new CaseInsensitiveString("upstream_stage_renamed");
partialConfig = PartialConfigMother.pipelineWithDependencyMaterial("remote-downstream", new PipelineConfig(pipelineConfig.name(), pipelineConfig.materialConfigs(), new StageConfig(upstreamStageRenamed, new JobConfigs())), new RepoConfigOrigin(repoConfig1, "repo1_r2"));
goPartialConfig.onSuccessPartialConfig(repoConfig1, partialConfig);
CruiseConfig currentConfig = goConfigService.getCurrentConfig();
DependencyMaterialConfig dependencyMaterialForRemotePipelineInConfigCache = currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name());
// repo1: merged config at valid r1, known at r2; both repos now flagged.
assertThat(dependencyMaterialForRemotePipelineInConfigCache.getStageName(), is(new CaseInsensitiveString("stage")));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is(String.format("Number of errors: 1+\n1. Stage with name 'upstream_stage_renamed' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r2);; \n- For Config Repo: url at repo1_r2", pipelineConfig.name())));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).get(0).getDescription(), is("Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n- For Config Repo: url2 at repo2_r2"));
// Rename the stage locally: fails vs valid partials, and the fallback merge of
// known partials fails full validation because of repo2's invalid stage name.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.getFirstStageConfig().setName(new CaseInsensitiveString("upstream_stage_renamed"));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.isSuccessful(), is(false));
// The message records both failures: the VALID-partials attempt and the
// LAST KNOWN fallback (which surfaced repo2's unrelated error).
assertThat(result.message(), is(String.format("Validations failed for pipeline '%s'. " +
"Error(s): [Merged update operation failed on VALID 2 partials. Falling back to using LAST KNOWN 2 partials. " +
"Exception message was: [Validation failed. Stage with name 'stage' does not exist on pipeline '%s', " +
"it is being referred to from pipeline 'remote-downstream' (url at repo1_r1)]" +
System.lineSeparator() +
"Merged config update operation failed using fallback LAST KNOWN 2 partials. " +
"Exception message was: Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores and periods " +
"(however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n]. Please correct and resubmit.", pipelineConfig.name(), pipelineConfig.name())));
assertThat(ErrorCollector.getAllErrors(pipelineConfig).isEmpty(), is(true));
// Nothing was promoted or changed: both repos keep valid r1 / known r2 and
// their health warnings; the local stage keeps its old name.
currentConfig = goConfigService.getCurrentConfig();
assertThat(currentConfig.getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(cachedGoPartials.lastKnownPartials().equals(cachedGoPartials.lastValidPartials()), is(false));
assertThat(currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name()).getStageName(), is(new CaseInsensitiveString("stage")));
assertThat(currentConfig.getPipelineConfigByName(pipelineConfig.name()).getFirstStageConfig().name(), is(new CaseInsensitiveString("stage")));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(new CaseInsensitiveString(independentRemotePipeline)).getOrigin()).getRevision(), is("repo2_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig2.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo2_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig2.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo2_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is(String.format("Number of errors: 1+\n1. Stage with name 'upstream_stage_renamed' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r2);; \n- For Config Repo: url at repo1_r2", pipelineConfig.name())));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).get(0).getDescription(), is("Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n- For Config Repo: url2 at repo2_r2"));
}
// Scenario: repo1's known partial (repo1_r2) expects stage 'upstream_stage_renamed'
// while the valid partial (repo1_r1) expects 'stage'. A local edit that renames
// the stage to a THIRD name ('new_name') satisfies neither merge attempt, so the
// save fails against the valid partials AND against the known-partials fallback,
// and the config is left untouched.
@Test
public void shouldNotSaveWhenKnownNotEqualsValidPartialsAndPipelineValidationFailsWithValidPartialsAsWellAsKnownPartialsMergedToMain() throws Exception {
PipelineConfig remoteDownstreamPipeline = partialConfig.getGroups().first().getPipelines().get(0);
assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
// New revision of repo1's partial depends on the not-yet-existing renamed stage.
final CaseInsensitiveString upstreamStageRenamed = new CaseInsensitiveString("upstream_stage_renamed");
partialConfig = PartialConfigMother.pipelineWithDependencyMaterial("remote-downstream", new PipelineConfig(pipelineConfig.name(), pipelineConfig.materialConfigs(), new StageConfig(upstreamStageRenamed, new JobConfigs())), new RepoConfigOrigin(repoConfig1, "repo1_r2"));
goPartialConfig.onSuccessPartialConfig(repoConfig1, partialConfig);
CruiseConfig currentConfig = goConfigService.getCurrentConfig();
DependencyMaterialConfig dependencyMaterialForRemotePipelineInConfigCache = currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name());
// Merged config stays at valid r1 (depending on 'stage'); known cache holds the
// r2 revision, which is flagged as an invalid merge against repo1.
assertThat(dependencyMaterialForRemotePipelineInConfigCache.getStageName(), is(new CaseInsensitiveString("stage")));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is(String.format("Number of errors: 1+\n1. Stage with name 'upstream_stage_renamed' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r2);; \n- For Config Repo: url at repo1_r2", pipelineConfig.name())));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
// Rename the stage to a name that satisfies neither partial revision.
String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
String md5 = CachedDigestUtils.md5Hex(xml);
pipelineConfig.getFirstStageConfig().setName(new CaseInsensitiveString("new_name"));
pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
assertThat(result.isSuccessful(), is(false));
// The message records both failed merges: VALID partials first, then the
// LAST KNOWN fallback.
assertThat(result.message(), is(String.format("Validations failed for pipeline '%s'. " +
"Error(s): [Merged update operation failed on VALID 2 partials. Falling back to using LAST KNOWN 2 partials. " +
"Exception message was: [Validation failed. Stage with name 'stage' does not exist on pipeline '%s', " +
"it is being referred to from pipeline 'remote-downstream' (url at repo1_r1)]" +
System.lineSeparator() +
"Merged config update operation failed using fallback LAST KNOWN 2 partials. " +
"Exception message was: Number of errors: 1+\n1. Stage with name 'upstream_stage_renamed' does not exist on pipeline " +
"'%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r2);; \n]. " +
"Please correct and resubmit.", pipelineConfig.name(), pipelineConfig.name(), pipelineConfig.name())));
// Nothing changed: partial revisions, the stage name, and the repo1 health
// warning all remain as before the attempted save.
currentConfig = goConfigService.getCurrentConfig();
assertThat(currentConfig.getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getValid(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.getKnown(repoConfig1.getMaterialConfig().getFingerprint()).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).materialConfigs().findDependencyMaterial(pipelineConfig.name()).getStageName(), is(new CaseInsensitiveString("stage")));
assertThat(currentConfig.getPipelineConfigByName(pipelineConfig.name()).getFirstStageConfig().name(), is(new CaseInsensitiveString("stage")));
assertThat(((RepoConfigOrigin) currentConfig.getPipelineConfigByName(remoteDownstreamPipeline.name()).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin()).getRevision(), is("repo1_r1"));
assertThat(((RepoConfigOrigin) cachedGoPartials.lastKnownPartials().get(0).getOrigin()).getRevision(), is("repo1_r2"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size(), is(1));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage(), is("Invalid Merged Configuration"));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription(), is(String.format("Number of errors: 1+\n1. Stage with name 'upstream_stage_renamed' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (url at repo1_r2);; \n- For Config Repo: url at repo1_r2", pipelineConfig.name())));
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty(), is(true));
}
@Test
public void shouldUpdateMergedConfigForEditUponSaveOfEntitiesDefinedInMainXmlUsingAPIs() {
    // The remote pipeline comes from the config-repo partial, so it must appear in the
    // merged views (current config, merged-for-edit) but NOT in the plain edit view,
    // which only contains entities defined in the main XML.
    PipelineConfig remoteDownstreamPipeline = partialConfig.getGroups().first().getPipelines().get(0);
    assertThat(pipelineConfigService.getPipelineConfig(remoteDownstreamPipelineName), is(not(nullValue())));
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
    assertThat(goConfigService.getConfigForEditing().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(false));
    assertThat(goConfigService.getMergedConfigForEditing().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
    // NOTE: the md5 must be computed BEFORE mutating pipelineConfig — the update API
    // uses it as an optimistic lock against the pre-edit serialized form.
    String md5 = CachedDigestUtils.md5Hex(new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig));
    pipelineConfig.setVariables(new EnvironmentVariablesConfig(asList(new EnvironmentVariableConfig("key", "value"))));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, md5, result);
    assertThat(result.isSuccessful(), is(true));
    // Saving a main-XML entity must not evict the config-repo pipeline from the
    // merged views: the same visibility invariants hold after the update.
    assertThat(pipelineConfigService.getPipelineConfig(remoteDownstreamPipelineName), is(not(nullValue())));
    assertThat(goConfigService.getCurrentConfig().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
    assertThat(goConfigService.getConfigForEditing().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(false));
    assertThat(goConfigService.getMergedConfigForEditing().getAllPipelineNames().contains(remoteDownstreamPipeline.name()), is(true));
}
/**
 * Adds a single-stage pipeline template to the editable config and saves it.
 * The template contains one "job" running {@code ls} with an environment
 * variable whose value references the {@code #{SOME_PARAM}} parameter.
 */
private void saveTemplateWithParamToConfig(CaseInsensitiveString templateName) throws Exception {
    ExecTask lsTask = new ExecTask();
    lsTask.setCommand("ls");
    JobConfig job = new JobConfig(new CaseInsensitiveString("job"));
    job.addTask(lsTask);
    job.addVariable("ENV_VAR", "#{SOME_PARAM}");
    StageConfig stage = new StageConfig(new CaseInsensitiveString("stage"), new JobConfigs(job));
    CruiseConfig configForEdit = goConfigDao.loadConfigHolder().configForEdit;
    configForEdit.addTemplate(new PipelineTemplateConfig(templateName, stage));
    saveConfig(configForEdit);
}
/**
 * Registers a pluggable SCM material in the editable config and saves it.
 * The supplied {@code id} doubles as the SCM id, name, and plugin id.
 */
private void saveScmMaterialToConfig(String id) throws Exception {
    Configuration configuration = new Configuration(
            new ConfigurationProperty(new ConfigurationKey("key"), new ConfigurationValue("value")));
    SCM scm = new SCM(id, new PluginConfiguration(id, "1.0"), configuration);
    scm.setName(id);
    CruiseConfig configForEdit = goConfigDao.loadConfigHolder().configForEdit;
    configForEdit.getSCMs().add(scm);
    saveConfig(configForEdit);
}
// Serializes the config with the XML writer into an in-memory buffer.
// NOTE(review): the serialized bytes are discarded — presumably only the
// validation performed during serialization is wanted here; confirm this
// helper is not expected to persist the config anywhere.
private void saveConfig(CruiseConfig cruiseConfig) throws Exception {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    new MagicalGoConfigXmlWriter(configCache, registry).write(cruiseConfig, buffer, false);
}
}
|
|
package engine;
import java.awt.Canvas;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.DisplayMode;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.Toolkit;
import java.awt.image.BufferStrategy;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
/********************************
*
* @author Stefan Lazarevic
*
* This is my first public project.
* In this project I will try to create a simple game engine for all of the game developers out there.
* All of my code is free to use, but an attribution would be very much appreciated in your projects.
*
********************************/
public class ScreenManager extends Canvas implements Runnable {

    private static final long serialVersionUID = 1L;

    /** Hosting window; null until setScreenEnvironment() has been called. */
    private JFrame frame;
    /** Base window title, reused when the FPS counter is appended. */
    private String TITLE;
    /** Default screen device, used for full-screen exclusive mode. */
    private GraphicsDevice device;
    private DisplayMode displayMode;
    private boolean fullScreen;
    /** Native desktop resolution, used to validate the requested window size. */
    private double graphicWidth;
    private double graphicHeight;
    /** Actual drawing-surface size, exposed to the rest of the engine. */
    public static int width;
    public static int height;
    private Thread game_thread;
    // volatile: written from the caller of gameStart(), read by the loop thread.
    private volatile boolean gameRunning;
    /** Target updates per second for the fixed-step loop. */
    private float FPS;
    private Float currentFPS = 60f;
    private boolean showFPS;
    public GameStateManager gsm;

    /**
     * Captures the default screen device and desktop resolution, and wires up
     * keyboard input and the game-state manager.
     */
    public ScreenManager() {
        GraphicsEnvironment environment = GraphicsEnvironment.getLocalGraphicsEnvironment();
        this.device = environment.getDefaultScreenDevice();
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        graphicWidth = screenSize.getWidth();
        graphicHeight = screenSize.getHeight();
        addKeyListener(Keyboard.keyboard);
        gsm = new GameStateManager();
    }

    /**
     * Creates the game window, either full-screen exclusive or as a fixed-size
     * decorated frame. Exits the JVM when the requested resolution exceeds the
     * desktop resolution.
     *
     * @param TITLE      window title
     * @param WIDTH      requested surface width in pixels
     * @param HEIGHT     requested surface height in pixels
     * @param BITDEPTH   color depth for the full-screen display mode
     * @param FULLSCREEN true for full-screen exclusive mode
     */
    public void setScreenEnvironment(String TITLE, int WIDTH, int HEIGHT, int BITDEPTH, boolean FULLSCREEN) {
        if (WIDTH <= (int) graphicWidth && HEIGHT <= (int) graphicHeight) {
            this.TITLE = TITLE;
            this.fullScreen = FULLSCREEN;
            frame = new JFrame();
            frame.setTitle(TITLE);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setResizable(false);
            if (FULLSCREEN) {
                frame.setUndecorated(true);
                frame.setIgnoreRepaint(true);
                displayMode = new DisplayMode(WIDTH, HEIGHT, BITDEPTH, DisplayMode.REFRESH_RATE_UNKNOWN);
                // displayMode was just constructed, so only the device capability needs checking.
                if (device.isDisplayChangeSupported()) {
                    device.setDisplayMode(displayMode);
                }
                device.setFullScreenWindow(frame);
                // Use the size actually granted by the device, which may differ
                // from the requested mode.
                width = device.getFullScreenWindow().getWidth();
                height = device.getFullScreenWindow().getHeight();
                frame.add(this);
            } else {
                width = WIDTH;
                height = HEIGHT;
                setPreferredSize(new Dimension(WIDTH, HEIGHT));
                frame.add(this);
                frame.pack();
                frame.setLocationRelativeTo(null);
            }
            setFocusable(true);
            frame.setVisible(true);
            requestFocus();
        } else {
            JOptionPane.showMessageDialog(frame, "Resolution is not supported, Please make sure to set resolution more than " + WIDTH + "x" + HEIGHT + " in order to play this game.");
            System.exit(0);
        }
    }

    /**
     * Fixed-timestep game loop. {@code unprocessed} accumulates elapsed time
     * measured in update intervals; one update is consumed per iteration once
     * it reaches 1, and a frame is painted only after an update happened.
     */
    @Override
    public void run() {
        double nsPerUpdate = 1000000000.0 / FPS;
        long lastTime = System.nanoTime();
        long timer = System.currentTimeMillis();
        double unprocessed = 0.0;
        int fpsCount = 0;
        int updateCount = 0;
        boolean canPaint = false;
        while (gameRunning) {
            long now = System.nanoTime();
            unprocessed += (now - lastTime) / nsPerUpdate;
            lastTime = now;
            if (unprocessed >= 1.0) {
                update();
                unprocessed--;
                updateCount++;
                canPaint = true;
            } else {
                canPaint = false;
            }
            try {
                Thread.sleep(5); // yield so the loop does not busy-spin
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (canPaint) {
                paint(fpsCount, updateCount);
                fpsCount++;
            }
            // Once per second: publish the FPS and reset the counters.
            if (System.currentTimeMillis() - 1000 > timer) {
                timer += 1000;
                setCurrentFPS(fpsCount);
                if (showFPS && !fullScreen) frame.setTitle(TITLE + " FPS >> " + fpsCount + " xxx UPS >> " + updateCount);
                fpsCount = 0;
                updateCount = 0;
            }
        }
    }

    /** Polls input and advances the active game state by one tick. */
    public void update() {
        Keyboard.poll();
        gsm.update();
    }

    /**
     * Renders one frame via triple buffering. The first call only creates the
     * buffer strategy and returns; painting starts from the next frame.
     */
    public void paint(int FPS, int UPS) {
        BufferStrategy bs = getBufferStrategy();
        if (bs == null) {
            createBufferStrategy(3);
            return;
        }
        Graphics g = bs.getDrawGraphics();
        /************************ PAINT ALL HERE **************************/
        g.setColor(Color.black);
        g.fillRect(0, 0, this.getWidth(), this.getHeight());
        gsm.paint(g);
        if (showFPS && fullScreen) {
            g.setColor(Color.WHITE);
            g.setFont(new Font("Dialog", Font.BOLD, 24));
            g.drawString(getCurrentFPS(), 34, 34);
        }
        /******************************************************************/
        g.dispose();
        bs.show();
    }

    /**
     * Configures the loop and starts the game thread.
     *
     * @param FPS     target updates per second
     * @param showFPS whether to display the FPS counter
     */
    public void gameStart(float FPS, boolean showFPS) {
        // Bug fix: the configuration must happen BEFORE the thread starts.
        // run() reads FPS exactly once on entry; previously the thread was
        // started first, so it could observe FPS == 0 and compute
        // nsPerUpdate = 1e9 / 0.0 = Infinity, freezing all updates forever.
        setShowFPS(showFPS);
        setFPS(FPS);
        gameRunning = true;
        game_thread = new Thread(this, "Screen Manager Main Thread");
        game_thread.start();
    }

    public float getFPS() {
        return FPS;
    }

    public void setFPS(float fPS) {
        FPS = fPS;
    }

    public void setShowFPS(boolean showFPS) {
        this.showFPS = showFPS;
    }

    /** Returns the most recently measured FPS as a display string. */
    public String getCurrentFPS() {
        return currentFPS.toString();
    }

    public void setCurrentFPS(float currentFPS) {
        this.currentFPS = currentFPS;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.property;
import static com.google.common.base.Preconditions.checkState;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_CONTENT_NODE_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexSelectionPolicy;
import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
import org.apache.jackrabbit.oak.spi.query.Cursor;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction;
import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
/**
* Provides a QueryIndex that does lookups against a property index
*
* <p>
* To define a property index on a subtree you have to add an <code>oak:index</code> node.
* <br>
* Next (as a child node) follows the index definition node that:
* <ul>
* <li>must be of type <code>oak:QueryIndexDefinition</code></li>
* <li>must have the <code>type</code> property set to <b><code>property</code></b></li>
* <li>contains the <code>propertyNames</code> property that indicates what property will be stored in the index</li>
* </ul>
* </p>
* <p>
* Optionally you can specify
* <ul>
* <li> a uniqueness constraint on a property index by setting the <code>unique</code> flag to <code>true</code></li>
* <li> that the property index only applies to a certain node type by setting the <code>declaringNodeTypes</code> property</li>
* </ul>
* </p>
* <p>
* Notes:
* <ul>
* <li> <code>propertyNames</code> can be a list of properties, and it is optional.in case it is missing, the node name will be used as a property name reference value</li>
* <li> <code>reindex</code> is a property that when set to <code>true</code>, triggers a full content reindex.</li>
* </ul>
* </p>
*
* <pre>
* <code>
* {
* NodeBuilder index = root.child("oak:index");
* index.child("uuid")
* .setProperty("jcr:primaryType", "oak:QueryIndexDefinition", Type.NAME)
* .setProperty("type", "property")
* .setProperty("propertyNames", "jcr:uuid")
* .setProperty("declaringNodeTypes", "mix:referenceable")
* .setProperty("unique", true)
* .setProperty("reindex", true);
* }
* </code>
* </pre>
*
* @see QueryIndex
* @see PropertyIndexLookup
*/
class PropertyIndex implements QueryIndex {

    /** Index type value this implementation handles ("type" property of the definition). */
    private static final String PROPERTY = "property";

    private static final Logger LOG = LoggerFactory.getLogger(PropertyIndex.class);

    private final MountInfoProvider mountInfoProvider;

    /**
     * Cached property index plan
     */
    private PropertyIndexPlan cachedPlan;

    PropertyIndex(MountInfoProvider mountInfoProvider) {
        this.mountInfoProvider = mountInfoProvider;
    }

    /**
     * Returns the plan for the given filter, reusing the cached plan when the
     * filter (compared by string form) has not changed since the last call.
     */
    private PropertyIndexPlan getPlan(NodeState root, Filter filter) {
        // Reuse cached plan if the filter is the same (which should always be the case). The filter is compared as a
        // string because it would not be possible to use its equals method since the preparing flag would be different
        // and creating a separate isSimilar method is not worth the effort since it would not be used anymore once the
        // PropertyIndex has been refactored to an AdvancedQueryIndex (which will make the plan cache obsolete).
        PropertyIndexPlan plan = this.cachedPlan;
        if (plan != null && plan.getFilter().toString().equals(filter.toString())) {
            return plan;
        } else {
            plan = createPlan(root, filter, mountInfoProvider);
            this.cachedPlan = plan;
            return plan;
        }
    }

    /**
     * Scans all index definitions under the root's oak:index node and returns
     * the cheapest applicable property-index plan, or {@code null} when no
     * definition applies to the filter.
     */
    private static PropertyIndexPlan createPlan(NodeState root, Filter filter,
                                                MountInfoProvider mountInfoProvider) {
        PropertyIndexPlan bestPlan = null;

        // TODO support indexes on a path
        // currently, only indexes on the root node are supported
        NodeState state = root.getChildNode(INDEX_DEFINITIONS_NAME);
        for (ChildNodeEntry entry : state.getChildNodeEntries()) {
            NodeState definition = entry.getNodeState();
            // Skip definitions excluded by name/tag options or the useIfExists flag.
            if (wrongIndex(entry, filter, root)) {
                continue;
            }
            // Only consider "property" indexes that have been built (content node exists).
            if (PROPERTY.equals(definition.getString(TYPE_PROPERTY_NAME))
                    && definition.hasChildNode(INDEX_CONTENT_NODE_NAME)) {
                PropertyIndexPlan plan = new PropertyIndexPlan(
                        entry.getName(), root, definition, filter, mountInfoProvider);
                if (plan.getCost() != Double.POSITIVE_INFINITY) {
                    LOG.debug("property cost for {} is {}",
                            plan.getName(), plan.getCost());
                    if (bestPlan == null || plan.getCost() < bestPlan.getCost()) {
                        bestPlan = plan;
                        // Stop comparing if the costs are the minimum
                        if (plan.getCost() == PropertyIndexPlan.COST_OVERHEAD) {
                            break;
                        }
                    }
                }
            }
        }
        return bestPlan;
    }

    /**
     * Decides whether the given index definition must be skipped for this
     * query, based on the "option(index ...)" name/tag hints in the filter,
     * the index's own tags/selection policy, and its enabled state.
     * Returns {@code true} when the index must NOT be used.
     */
    private static boolean wrongIndex(ChildNodeEntry entry, Filter filter, NodeState root) {
        // REMARK: similar code is used in oak-lucene, IndexPlanner
        // skip index if "option(index ...)" doesn't match
        NodeState definition = entry.getNodeState();
        if (!isEnabled(definition, root)) {
            return true;
        }
        PropertyRestriction indexName = filter.getPropertyRestriction(IndexConstants.INDEX_NAME_OPTION);
        boolean wrong = false;
        if (indexName != null && indexName.first != null) {
            String name = indexName.first.getValue(Type.STRING);
            String thisName = entry.getName();
            if (thisName.equals(name)) {
                // index name specified, and matches
                return false;
            }
            // a different index was requested by name; remember this, but a
            // matching tag below can still not rescue it (falls through to "wrong")
            wrong = true;
        }
        PropertyRestriction indexTag = filter.getPropertyRestriction(IndexConstants.INDEX_TAG_OPTION);
        if (indexTag != null && indexTag.first != null) {
            // index tag specified
            String[] tags = getOptionalStrings(definition, IndexConstants.INDEX_TAGS);
            if (tags == null) {
                // no tag
                return true;
            }
            String tag = indexTag.first.getValue(Type.STRING);
            for(String t : tags) {
                if (t.equals(tag)) {
                    // tag matches
                    return false;
                }
            }
            // no tag matches
            return true;
        } else if (IndexSelectionPolicy.TAG.equals(definition.getString(IndexConstants.INDEX_SELECTION_POLICY))) {
            // index tags are not specified in query, but required by the "tag" index selection policy
            return true;
        }
        // no tag specified
        return wrong;
    }

    /**
     * Evaluates the optional "useIfExists" property of an index definition:
     * the index is enabled only when the referenced path (optionally ending
     * in "@propertyName") exists under the given root.
     */
    private static boolean isEnabled(NodeState definition, NodeState root) {
        String useIfExists = definition.getString(IndexConstants.USE_IF_EXISTS);
        if (useIfExists == null) {
            // property absent: index is unconditionally enabled
            return true;
        }
        if (!PathUtils.isValid(useIfExists)) {
            return false;
        }
        NodeState nodeState = root;
        for (String element : PathUtils.elements(useIfExists)) {
            if (element.startsWith("@")) {
                // trailing "@name" element checks for a property, not a child node
                return nodeState.hasProperty(element.substring(1));
            }
            nodeState = nodeState.getChildNode(element);
            if (!nodeState.exists()) {
                return false;
            }
        }
        return true;
    }

    /**
     * Reads a multi-valued string property from the definition, or returns
     * {@code null} when the property is absent.
     */
    private static String[] getOptionalStrings(NodeState defn, String propertyName) {
        PropertyState ps = defn.getProperty(propertyName);
        if (ps != null) {
            return Iterables.toArray(ps.getValue(Type.STRINGS), String.class);
        }
        return null;
    }

    //--------------------------------------------------------< QueryIndex >--

    @Override
    public double getMinimumCost() {
        return PropertyIndexPlan.COST_OVERHEAD;
    }

    @Override
    public String getIndexName() {
        return PROPERTY;
    }

    @Override
    public String getIndexName(Filter filter, NodeState root) {
        PropertyIndexPlan plan = getPlan(root, filter);
        return plan == null ? null : plan.getName();
    }

    /**
     * Returns the cost of the best plan, or positive infinity when this index
     * cannot serve the query (full-text/native constraints, or no property
     * restrictions at all).
     */
    @Override
    public double getCost(Filter filter, NodeState root) {
        if (filter.getFullTextConstraint() != null) {
            // not an appropriate index for full-text search
            return Double.POSITIVE_INFINITY;
        }
        if (filter.containsNativeConstraint()) {
            // not an appropriate index for native search
            return Double.POSITIVE_INFINITY;
        }
        if (filter.getPropertyRestrictions().isEmpty()) {
            // not an appropriate index for no property restrictions & selector constraints
            return Double.POSITIVE_INFINITY;
        }
        PropertyIndexPlan plan = getPlan(root, filter);
        if (plan != null) {
            return plan.getCost();
        } else {
            return Double.POSITIVE_INFINITY;
        }
    }

    @Override
    public Cursor query(Filter filter, NodeState root) {
        PropertyIndexPlan plan = getPlan(root, filter);
        // The query engine only calls query() after getCost() returned a finite
        // cost, so a missing plan here indicates a programming error.
        checkState(plan != null,
                "Property index is used even when no index"
                + " is available for filter " + filter);
        return plan.execute();
    }

    @Override
    public String getPlan(Filter filter, NodeState root) {
        PropertyIndexPlan plan = getPlan(root, filter);
        if (plan != null) {
            return plan.toString();
        } else {
            return "property index not applicable";
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.cloud;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.CamelContext;
import org.apache.camel.Expression;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.NoFactoryAvailableException;
import org.apache.camel.cloud.ServiceCallConstants;
import org.apache.camel.cloud.ServiceExpressionFactory;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.language.ExpressionDefinition;
import org.apache.camel.spi.Configurer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.PropertyBindingSupport;
@Metadata(label = "routing,cloud")
@XmlRootElement(name = "serviceExpression")
@XmlAccessorType(XmlAccessType.FIELD)
@Configurer(extended = true)
public class ServiceCallExpressionConfiguration extends ServiceCallConfiguration implements ServiceExpressionFactory {
    @XmlTransient
    private final ServiceCallDefinition parent;
    @XmlTransient
    private final String factoryKey;
    @XmlAttribute
    @Metadata(defaultValue = ServiceCallConstants.SERVICE_HOST)
    private String hostHeader = ServiceCallConstants.SERVICE_HOST;
    @XmlAttribute
    @Metadata(defaultValue = ServiceCallConstants.SERVICE_PORT)
    private String portHeader = ServiceCallConstants.SERVICE_PORT;
    @XmlElementRef(required = false)
    private ExpressionDefinition expressionType;
    @XmlTransient
    private Expression expression;

    public ServiceCallExpressionConfiguration() {
        this(null, null);
    }

    public ServiceCallExpressionConfiguration(ServiceCallDefinition parent, String factoryKey) {
        this.parent = parent;
        this.factoryKey = factoryKey;
    }

    /** Ends the fluent configuration and returns the parent service-call definition. */
    public ServiceCallDefinition end() {
        return this.parent;
    }

    /** Ends both this configuration and the parent service-call definition. */
    public ProcessorDefinition<?> endParent() {
        return this.parent.end();
    }

    // *************************************************************************
    //
    // *************************************************************************

    /**
     * Adds a custom property (fluent). Covariant override of
     * {@link ServiceCallConfiguration#property(String, String)}.
     */
    @Override
    public ServiceCallExpressionConfiguration property(String key, String value) {
        // Bug fix: previously cast to ServiceCallServiceChooserConfiguration (a
        // copy-paste from a sibling configuration class). Since "this" is a
        // ServiceCallExpressionConfiguration, that cast threw a
        // ClassCastException on every call.
        return (ServiceCallExpressionConfiguration) super.property(key, value);
    }

    public String getHostHeader() {
        return hostHeader;
    }

    /**
     * The header that holds the service host information, default ServiceCallConstants.SERVICE_HOST
     */
    public void setHostHeader(String hostHeader) {
        this.hostHeader = hostHeader;
    }

    public String getPortHeader() {
        return portHeader;
    }

    /**
     * The header that holds the service port information, default ServiceCallConstants.SERVICE_PORT
     */
    public void setPortHeader(String portHeader) {
        this.portHeader = portHeader;
    }

    public ExpressionDefinition getExpressionType() {
        return expressionType;
    }

    public void setExpressionType(ExpressionDefinition expressionType) {
        this.expressionType = expressionType;
    }

    public Expression getExpression() {
        return expression;
    }

    public void setExpression(Expression expression) {
        this.expression = expression;
    }

    /**
     * The header that holds the service host information, default ServiceCallConstants.SERVICE_HOST
     */
    public ServiceCallExpressionConfiguration hostHeader(String hostHeader) {
        setHostHeader(hostHeader);
        return this;
    }

    /**
     * The header that holds the service port information, default ServiceCallConstants.SERVICE_PORT
     */
    public ServiceCallExpressionConfiguration portHeader(String portHeader) {
        setPortHeader(portHeader);
        return this;
    }

    public ServiceCallExpressionConfiguration expressionType(ExpressionDefinition expressionType) {
        setExpressionType(expressionType);
        return this;
    }

    public ServiceCallExpressionConfiguration expression(Expression expression) {
        setExpression(expression);
        return this;
    }

    // *************************************************************************
    // Factory
    // *************************************************************************

    /**
     * Creates the service expression, resolving in order: an explicitly set
     * {@link Expression}, an {@link ExpressionDefinition}, and finally a
     * {@link ServiceExpressionFactory} looked up by {@code factoryKey} in the
     * registry or via the factory finder (configured from this instance's
     * options in the latter case).
     *
     * @throws NoFactoryAvailableException when the factory-finder lookup fails
     */
    @Override
    public Expression newInstance(CamelContext camelContext) throws Exception {
        Expression answer = getExpression();
        if (answer != null) {
            return answer;
        }

        ExpressionDefinition expressionType = getExpressionType();
        if (expressionType != null) {
            return expressionType.createExpression(camelContext);
        }

        if (factoryKey != null) {
            // First try to find the factory from the registry.
            ServiceExpressionFactory factory
                    = CamelContextHelper.lookup(camelContext, factoryKey, ServiceExpressionFactory.class);
            if (factory != null) {
                // If a factory is found in the registry do not re-configure it
                // as
                // it should be pre-configured.
                answer = factory.newInstance(camelContext);
            } else {
                Class<?> type;
                try {
                    // Then use Service factory.
                    type = camelContext.adapt(ExtendedCamelContext.class)
                            .getFactoryFinder(ServiceCallDefinitionConstants.RESOURCE_PATH).findClass(factoryKey).orElse(null);
                } catch (Exception e) {
                    throw new NoFactoryAvailableException(ServiceCallDefinitionConstants.RESOURCE_PATH + factoryKey, e);
                }

                if (type != null) {
                    if (ServiceExpressionFactory.class.isAssignableFrom(type)) {
                        factory = (ServiceExpressionFactory) camelContext.getInjector().newInstance(type, false);
                    } else {
                        throw new IllegalArgumentException(
                                "Resolving Expression: " + factoryKey
                                + " detected type conflict: Not a ExpressionFactory implementation. Found: "
                                + type.getName());
                    }
                }

                try {
                    // Configure the freshly created factory from this instance's
                    // options, resolving property placeholders in string values.
                    Map<String, Object> parameters = getConfiguredOptions(camelContext, this);

                    parameters.replaceAll((k, v) -> {
                        if (v instanceof String) {
                            try {
                                v = camelContext.resolvePropertyPlaceholders((String) v);
                            } catch (Exception e) {
                                throw new IllegalArgumentException(
                                        String.format("Exception while resolving %s (%s)", k, v.toString()), e);
                            }
                        }
                        return v;
                    });

                    // Convert properties to Map<String, String>
                    Map<String, String> map = getPropertiesAsMap(camelContext);
                    if (map != null && !map.isEmpty()) {
                        parameters.put("properties", map);
                    }

                    postProcessFactoryParameters(camelContext, parameters);

                    PropertyBindingSupport.build().bind(camelContext, factory, parameters);

                    answer = factory.newInstance(camelContext);
                } catch (Exception e) {
                    throw new IllegalArgumentException(e);
                }
            }
        }

        return answer;
    }
}
|
|
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.util;
import jodd.core.JoddCore;
import jodd.io.FileUtil;
import jodd.io.StreamUtil;
import java.io.FileInputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.io.InputStream;
import java.io.IOException;
import java.io.File;
import java.net.URLConnection;
import java.util.Set;
import java.util.TreeSet;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
/**
* Utilities to manipulate class path, define and find classes etc.
*/
public class ClassLoaderUtil {
// ---------------------------------------------------------------- default class loader
/**
 * Returns default class loader. By default, it is {@link #getContextClassLoader() threads context class loader}.
 * If this one is <code>null</code>, then class loader of the <b>caller class</b> is returned.
 */
public static ClassLoader getDefaultClassLoader() {
    ClassLoader cl = getContextClassLoader();
    if (cl == null) {
        // Depth 2 skips this frame and resolves the immediate caller; do not
        // wrap this call in a helper or the stack depth would be wrong.
        Class callerClass = ReflectUtil.getCallerClass(2);
        cl = callerClass.getClassLoader();
    }
    return cl;
}
/**
 * Returns the current thread's context class loader (may be <code>null</code>).
 */
public static ClassLoader getContextClassLoader() {
    return Thread.currentThread().getContextClassLoader();
}
/**
 * Sets the context class loader on the current thread.
 */
public static void setContextClassLoader(ClassLoader classLoader) {
    Thread.currentThread().setContextClassLoader(classLoader);
}
/**
 * Returns the system (application) class loader.
 */
public static ClassLoader getSystemClassLoader() {
    return ClassLoader.getSystemClassLoader();
}
// ---------------------------------------------------------------- add class path
/**
 * Adds a file or directory to the classpath at runtime by converting it to a
 * URL and delegating to {@link #addUrlToClassPath(java.net.URL, ClassLoader)}.
 *
 * @throws IllegalArgumentException when the path cannot be converted to a URL
 */
public static void addFileToClassPath(File path, ClassLoader classLoader) {
    URL url;
    try {
        url = FileUtil.toURL(path);
    } catch (MalformedURLException muex) {
        throw new IllegalArgumentException("Invalid path: " + path, muex);
    }
    addUrlToClassPath(url, classLoader);
}
/**
 * Adds the content pointed by the URL to the classpath during runtime.
 * Uses reflection since <code>addURL</code> method of
 * <code>URLClassLoader</code> is protected.
 * <p>
 * NOTE(review): this assumes <code>classLoader</code> is actually a
 * <code>URLClassLoader</code>; for other loader types the reflective call
 * fails and is rethrown as <code>IllegalArgumentException</code>.
 */
public static void addUrlToClassPath(URL url, ClassLoader classLoader) {
    try {
        ReflectUtil.invokeDeclared(URLClassLoader.class, classLoader, "addURL",
                new Class[]{URL.class}, new Object[]{url});
    } catch (Exception ex) {
        throw new IllegalArgumentException("Add URL failed: " + url, ex);
    }
}
// ---------------------------------------------------------------- define class
/**
 * Defines a class from a byte array using the
 * {@link #getDefaultClassLoader() default class loader}.
 * (The previous javadoc said "system class loader", which did not match the code.)
 * @see #defineClass(String, byte[], ClassLoader)
 */
public static Class defineClass(String className, byte[] classData) {
    return defineClass(className, classData, getDefaultClassLoader());
}
/**
 * Defines a class from byte array into the specified class loader by
 * reflectively invoking the protected <code>ClassLoader#defineClass</code>.
 * Warning: this is a <b>hack</b>!
 * @param className optional class name, may be <code>null</code>
 * @param classData bytecode data
 * @param classLoader classloader that will load class
 * @throws RuntimeException wrapping any failure (security restrictions,
 *         malformed bytecode, duplicate definition, ...)
 */
public static Class defineClass(String className, byte[] classData, ClassLoader classLoader) {
    try {
        return (Class) ReflectUtil.invokeDeclared(ClassLoader.class, classLoader, "defineClass",
                new Class[] {String.class, byte[].class, int.class, int.class},
                new Object[] {className, classData, Integer.valueOf(0), Integer.valueOf(classData.length)});
    } catch (Throwable th) {
        throw new RuntimeException("Define class failed: " + className, th);
    }
}
// ---------------------------------------------------------------- find class
/**
 * Finds and loads a class from the given classpath of files/directories.
 * Each entry is converted to a URL; entries that cannot be converted are
 * silently skipped.
 *
 * @param className class name to find
 * @param classPath classpath entries
 * @param parent    optional parent class loader, may be <code>null</code>
 * @see #findClass(String, java.net.URL[], ClassLoader)
 */
public static Class findClass(String className, File[] classPath, ClassLoader parent) {
    URL[] urls = new URL[classPath.length];
    for (int i = 0; i < classPath.length; i++) {
        File file = classPath[i];
        try {
            urls[i] = FileUtil.toURL(file);
        } catch (MalformedURLException ignore) {
        }
    }
    // Bug fix: the supplied parent class loader was previously dropped
    // (null was passed to the delegate), so the parent parameter had no effect.
    return findClass(className, urls, parent);
}
/**
 * Finds and loads class on classpath even if it was already loaded, by
 * reflectively invoking <code>findClass</code> on a fresh, isolated
 * <code>URLClassLoader</code>.
 * @param className class name to find
 * @param classPath classpath
 * @param parent optional parent class loader, may be <code>null</code>
 */
public static Class findClass(String className, URL[] classPath, ClassLoader parent) {
    URLClassLoader tempClassLoader = parent != null ? new URLClassLoader(classPath, parent) : new URLClassLoader(classPath);
    try {
        // NOTE(review): the temporary loader is not closed here — presumably
        // deliberate, since the returned class may still need it to resolve
        // dependent classes; confirm before adding a close().
        return (Class) ReflectUtil.invokeDeclared(URLClassLoader.class, tempClassLoader, "findClass",
                new Class[] {String.class},
                new Object[] {className});
    } catch (Throwable th) {
        throw new RuntimeException("Class not found: " + className, th);
    }
}
// ---------------------------------------------------------------- classpath

/** Manifest file names probed (in this order) inside a classpath directory's META-INF. */
private static final String[] MANIFESTS = {"Manifest.mf", "manifest.mf", "MANIFEST.MF"};
/**
 * Locates <b>tools.jar</b> under <code>{java.home}/lib</code>.
 * Returns <code>null</code> if it does not exist (e.g. on a plain JRE).
 */
public static File findToolsJar() {
    File javaHome = new File(SystemUtil.getJavaHome());
    File toolsJar = new File(javaHome.getAbsolutePath() + File.separatorChar + "lib" + File.separatorChar + "tools.jar");
    return toolsJar.exists() ? toolsJar : null;
}
/**
 * Returns classpath item manifest or <code>null</code> if not found.
 * <p>
 * For a jar file the manifest is read via {@link JarFile}; for a directory,
 * the first of the {@code MANIFESTS} candidates found under
 * <code>META-INF</code> is parsed. I/O errors are swallowed and
 * <code>null</code> is returned, preserving the original best-effort contract.
 */
public static Manifest getClasspathItemManifest(File classpathItem) {
    Manifest manifest = null;

    if (classpathItem.isFile()) {
        // Bug fix: the JarFile itself was previously never closed (leaking its
        // underlying file handle); only a redundant, unused FileInputStream was
        // closed. try-with-resources closes the JarFile deterministically.
        try (JarFile jar = new JarFile(classpathItem)) {
            manifest = jar.getManifest();
        } catch (IOException ignore) {
        }
    } else {
        File metaDir = new File(classpathItem, "META-INF");
        File manifestFile = null;
        if (metaDir.isDirectory()) {
            for (String m : MANIFESTS) {
                File mFile = new File(metaDir, m);
                if (mFile.isFile()) {
                    manifestFile = mFile;
                    break;
                }
            }
        }
        if (manifestFile != null) {
            try (FileInputStream fis = new FileInputStream(manifestFile)) {
                manifest = new Manifest(fis);
            } catch (IOException ignore) {
            }
        }
    }

    return manifest;
}
/**
 * Resolves the base directory of a classpath entry: the parent folder when the
 * entry is a (jar) file, or the directory path itself otherwise.
 */
public static String getClasspathItemBaseDir(File classpathItem) {
    return classpathItem.isFile() ? classpathItem.getParent() : classpathItem.toString();
}
/**
 * Returns default classpath using
 * {@link #getDefaultClassLoader() default classloader}.
 * @see #getDefaultClasspath(ClassLoader)
 */
public static File[] getDefaultClasspath() {
    return getDefaultClasspath(getDefaultClassLoader());
}
/**
 * Returns default class path from all available <code>URLClassLoader</code>
 * in classloader hierarchy. The following is added to the classpath list:
 * <ul>
 * <li>file URLs from <code>URLClassLoader</code> (other URL protocols are ignored)</li>
 * <li>inner entries from containing <b>manifest</b> files (if exist)</li>
 * <li>bootstrap classpath</li>
 * </ul>
 */
public static File[] getDefaultClasspath(ClassLoader classLoader) {
    // TreeSet: deduplicates canonical files and yields a stable, sorted result.
    Set<File> classpaths = new TreeSet<>();

    // Walk the loader hierarchy up to the bootstrap loader.
    while (classLoader != null) {
        if (classLoader instanceof URLClassLoader) {
            URL[] urls = ((URLClassLoader) classLoader).getURLs();
            for (URL u : urls) {
                // toFile() returns null for non-file URLs; those are skipped.
                File f = FileUtil.toFile(u);
                if ((f != null) && f.exists()) {
                    try {
                        f = f.getCanonicalFile();

                        boolean newElement = classpaths.add(f);
                        if (newElement) {
                            // Follow the item's manifest Class-Path entries,
                            // but only once per newly discovered item.
                            addInnerClasspathItems(classpaths, f);
                        }
                    } catch (IOException ignore) {
                    }
                }
            }
        }
        classLoader = classLoader.getParent();
    }

    // Append the bootstrap classpath (sun.boot.class.path), when available.
    String bootstrap = SystemUtil.getSunBoothClassPath();
    if (bootstrap != null) {
        String[] bootstrapFiles = StringUtil.splitc(bootstrap, File.pathSeparatorChar);
        for (String bootstrapFile: bootstrapFiles) {
            File f = new File(bootstrapFile);
            if (f.exists()) {
                try {
                    f = f.getCanonicalFile();

                    boolean newElement = classpaths.add(f);
                    if (newElement) {
                        addInnerClasspathItems(classpaths, f);
                    }
                } catch (IOException ignore) {
                }
            }
        }
    }

    File[] result = new File[classpaths.size()];
    return classpaths.toArray(result);
}
/**
 * Adds classpath entries listed in the <code>Class-Path</code> attribute of
 * the item's manifest. Each token is resolved, in order, as a path relative
 * to the item's base folder, as an absolute path, and finally as a URL;
 * the first resolution that names an existing file wins.
 */
private static void addInnerClasspathItems(Set<File> classpaths, File item) {
	Manifest manifest = getClasspathItemManifest(item);
	if (manifest == null) {
		return;
	}
	Attributes attributes = manifest.getMainAttributes();
	if (attributes == null) {
		return;
	}
	String classpathValue = attributes.getValue(Attributes.Name.CLASS_PATH);
	if (classpathValue == null) {
		return;
	}

	String base = getClasspathItemBaseDir(item);
	for (String token : StringUtil.splitc(classpathValue, ' ')) {
		// attempt #1: relative to the base folder
		File resolved = existingCanonicalFile(new File(base, token));
		if (resolved == null) {
			// attempt #2: as an absolute path
			resolved = existingCanonicalFile(new File(token));
		}
		if (resolved == null) {
			// attempt #3: as a URL
			try {
				resolved = existingCanonicalFile(new File(new URL(token).getFile()));
			} catch (Exception ignore) {
				resolved = null;
			}
		}
		if (resolved != null && resolved.exists()) {
			classpaths.add(resolved);
		}
	}
}

/**
 * Canonicalizes a candidate file and returns it only when it exists;
 * returns <code>null</code> otherwise (including on I/O failures).
 */
private static File existingCanonicalFile(File candidate) {
	try {
		File canonical = candidate.getCanonicalFile();
		return canonical.exists() ? canonical : null;
	} catch (Exception ignore) {
		return null;
	}
}
// ---------------------------------------------------------------- get resource

/**
 * Retrieves given resource as URL, using the default classloader chain.
 * Returns <code>null</code> if the resource is not found.
 * @see #getResourceUrl(String, ClassLoader)
 */
public static URL getResourceUrl(String resourceName) {
	return getResourceUrl(resourceName, null);
}
/**
 * Retrieves given resource as URL. The resource name is always absolute and
 * may start with a slash character, which is stripped before lookup.
 * <p>
 * The resource is looked up using class loaders in the following order:
 * <ul>
 * <li>the provided <code>classLoader</code>, if any</li>
 * <li>{@link Thread#getContextClassLoader() Thread.currentThread().getContextClassLoader()}</li>
 * <li>the class loader of the calling class, similar to <code>Class.forName()</code></li>
 * </ul>
 * Returns <code>null</code> if the resource was not found.
 */
public static URL getResourceUrl(String resourceName, ClassLoader classLoader) {

	if (resourceName.startsWith("/")) {
		resourceName = resourceName.substring(1);
	}

	URL resourceUrl;

	// try #1 - using provided class loader
	if (classLoader != null) {
		resourceUrl = classLoader.getResource(resourceName);
		if (resourceUrl != null) {
			return resourceUrl;
		}
	}

	// try #2 - using thread context class loader
	ClassLoader currentThreadClassLoader = Thread.currentThread().getContextClassLoader();
	if ((currentThreadClassLoader != null) && (currentThreadClassLoader != classLoader)) {
		resourceUrl = currentThreadClassLoader.getResource(resourceName);
		if (resourceUrl != null) {
			return resourceUrl;
		}
	}

	// try #3 - using caller classloader, similar as Class.forName();
	// note: getCallerClass depth must stay unchanged relative to this method
	Class callerClass = ReflectUtil.getCallerClass(2);
	ClassLoader callerClassLoader = callerClass.getClassLoader();

	// callerClassLoader is null for classes loaded by the bootstrap loader;
	// guard against NPE in that case
	if ((callerClassLoader != null)
			&& (callerClassLoader != classLoader)
			&& (callerClassLoader != currentThreadClassLoader)) {
		resourceUrl = callerClassLoader.getResource(resourceName);
		if (resourceUrl != null) {
			return resourceUrl;
		}
	}

	return null;
}
// ---------------------------------------------------------------- get resource file

/**
 * Retrieves resource as file, using the default classloader chain.
 * @see #getResourceFile(String, ClassLoader)
 */
public static File getResourceFile(String resourceName) {
	return getResourceFile(resourceName, null);
}
/**
 * Retrieves resource as file. The resource is located as a
 * {@link #getResourceUrl(String, ClassLoader) URL}, then converted to a URI
 * so it can be used by the File constructor.
 * <p>
 * Returns <code>null</code> when the resource does not exist, or when its
 * URL cannot be represented as a file URI.
 */
public static File getResourceFile(String resourceName, ClassLoader classLoader) {
	URL url = getResourceUrl(resourceName, classLoader);
	if (url == null) {
		// resource not found: avoid dereferencing a null URL
		return null;
	}
	try {
		return new File(url.toURI());
	} catch (URISyntaxException ignore) {
		return null;
	}
}
// ---------------------------------------------------------------- get resource stream

/**
 * Opens a resource of the specified name for reading, using the default
 * classloader chain.
 * @see #getResourceAsStream(String, ClassLoader)
 */
public static InputStream getResourceAsStream(String resourceName) throws IOException {
	return getResourceAsStream(resourceName, null);
}

/**
 * Opens a resource of the specified name for reading, with explicit control
 * over URL-connection caching.
 * @see #getResourceAsStream(String, ClassLoader, boolean)
 */
public static InputStream getResourceAsStream(String resourceName, boolean useCache) throws IOException {
	return getResourceAsStream(resourceName, null, useCache);
}

/**
 * Opens a resource of the specified name for reading.
 * Returns <code>null</code> when the resource is not found.
 * @see #getResourceUrl(String, ClassLoader)
 */
public static InputStream getResourceAsStream(String resourceName, ClassLoader callingClass) throws IOException {
	URL url = getResourceUrl(resourceName, callingClass);
	if (url != null) {
		return url.openStream();
	}
	return null;
}

/**
 * Opens a resource of the specified name for reading. Controls caching,
 * which matters when the same jar is reloaded using a custom classloader.
 * Returns <code>null</code> when the resource is not found.
 */
public static InputStream getResourceAsStream(String resourceName, ClassLoader callingClass, boolean useCache) throws IOException {
	URL url = getResourceUrl(resourceName, callingClass);
	if (url != null) {
		// open via URLConnection so caching behavior can be configured
		URLConnection urlConnection = url.openConnection();
		urlConnection.setUseCaches(useCache);
		return urlConnection.getInputStream();
	}
	return null;
}
/**
 * Opens the class file of the specified class for reading, using the
 * class's own classloader.
 * @see #getResourceAsStream(String, ClassLoader)
 */
public static InputStream getClassAsStream(Class clazz) throws IOException {
	return getResourceAsStream(getClassFileName(clazz), clazz.getClassLoader());
}

/**
 * Opens a class file of the specified name for reading. No specific
 * classloader is used for locating the class.
 * @see #getResourceAsStream(String, ClassLoader)
 */
public static InputStream getClassAsStream(String className) throws IOException {
	return getResourceAsStream(getClassFileName(className));
}

/**
 * Opens a class file of the specified name for reading using the provided
 * class loader.
 */
public static InputStream getClassAsStream(String className, ClassLoader classLoader) throws IOException {
	return getResourceAsStream(getClassFileName(className), classLoader);
}
// ---------------------------------------------------------------- load class

/**
 * Loads a class using the default class loader strategy, with no explicit
 * class loader.
 * @see jodd.util.cl.DefaultClassLoaderStrategy
 */
public static Class loadClass(String className) throws ClassNotFoundException {
	return JoddCore.classLoaderStrategy.loadClass(className, null);
}

/**
 * Loads a class with the given class loader using the default class loader
 * strategy.
 * @see jodd.util.cl.DefaultClassLoaderStrategy
 */
public static Class loadClass(String className, ClassLoader classLoader) throws ClassNotFoundException {
	return JoddCore.classLoaderStrategy.loadClass(className, classLoader);
}
// ---------------------------------------------------------------- misc

/**
 * Resolves a class file name from a class by replacing dots with '/'
 * separators and appending the class extension. For an array class, the
 * component type's class file name is returned.
 */
public static String getClassFileName(Class clazz) {
	Class target = clazz.isArray() ? clazz.getComponentType() : clazz;
	return getClassFileName(target.getName());
}

/**
 * Resolves a class file name from a class name by replacing dots with '/'
 * separators and appending the <code>.class</code> extension.
 */
public static String getClassFileName(String className) {
	return className.replace('.', '/').concat(".class");
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.plan.hep;
import org.apache.calcite.linq4j.function.Function2;
import org.apache.calcite.linq4j.function.Functions;
import org.apache.calcite.plan.AbstractRelOptPlanner;
import org.apache.calcite.plan.CommonRelSubExprRule;
import org.apache.calcite.plan.Context;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptCostFactory;
import org.apache.calcite.plan.RelOptCostImpl;
import org.apache.calcite.plan.RelOptMaterialization;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.plan.RelTrait;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.Converter;
import org.apache.calcite.rel.convert.ConverterRule;
import org.apache.calcite.rel.convert.TraitMatchingRule;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.rel.metadata.RelMdUtil;
import org.apache.calcite.rel.metadata.RelMetadataProvider;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;
import org.apache.calcite.util.graph.BreadthFirstIterator;
import org.apache.calcite.util.graph.CycleDetector;
import org.apache.calcite.util.graph.DefaultDirectedGraph;
import org.apache.calcite.util.graph.DefaultEdge;
import org.apache.calcite.util.graph.DepthFirstIterator;
import org.apache.calcite.util.graph.DirectedGraph;
import org.apache.calcite.util.graph.Graphs;
import org.apache.calcite.util.graph.TopologicalOrderIterator;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* HepPlanner is a heuristic implementation of the {@link RelOptPlanner}
* interface.
*/
public class HepPlanner extends AbstractRelOptPlanner {
//~ Instance fields --------------------------------------------------------

// Top-level program executed by findBestExp().
private final HepProgram mainProgram;

// Program currently executing; may be a subprogram of mainProgram.
private HepProgram currentProgram;

// Root vertex of the query graph, set via setRoot().
private HepRelVertex root;

// Traits requested for the root via changeTraits(); consulted by
// doesConverterApply() when deciding whether a converter is useful.
private RelTraitSet requestedRootTraits;

// Maps (digest, row type) to vertex, enabling common sub-expression
// recognition and garbage detection.
private final Map<Pair<String, RelDataType>, HepRelVertex> mapDigestToVertex = new HashMap<>();

// NOTE jvs 24-Apr-2006: We use LinkedHashSet
// in order to provide deterministic behavior.
private final Set<RelOptRule> allRules = new LinkedHashSet<>();

// Counters driving fixpoint detection and garbage-collection amortization.
private int nTransformations;

private int graphSizeLastGC;

private int nTransformationsLastGC;

// If true, keep the graph a tree (never share equivalent sub-expressions).
private final boolean noDag;

/**
 * Query graph, with edges directed from parent to child. This is a
 * single-rooted DAG, possibly with additional roots corresponding to
 * discarded plan fragments which remain to be garbage-collected.
 */
private final DirectedGraph<HepRelVertex, DefaultEdge> graph =
    DefaultDirectedGraph.create();

// Callback invoked whenever a RelNode is copied during planning.
private final Function2<RelNode, RelNode, Void> onCopyHook;

private final List<RelOptMaterialization> materializations =
    new ArrayList<>();
//~ Constructors -----------------------------------------------------------

/**
 * Creates a new HepPlanner that allows DAG, with no context, no copy hook,
 * and the default cost factory.
 *
 * @param program program controlling rule application
 */
public HepPlanner(HepProgram program) {
  this(program, null, false, null, RelOptCostImpl.FACTORY);
}

/**
 * Creates a new HepPlanner that allows DAG.
 *
 * @param program program controlling rule application
 * @param context to carry while planning
 */
public HepPlanner(HepProgram program, Context context) {
  this(program, context, false, null, RelOptCostImpl.FACTORY);
}

/**
 * Creates a new HepPlanner with the option to keep the graph a
 * tree (noDag = true) or allow DAG (noDag = false).
 *
 * @param program     Program controlling rule application
 * @param context     Context to carry while planning; may be null
 * @param noDag       If false, create shared nodes if expressions are
 *                    identical
 * @param onCopyHook  Function to call when a node is copied; may be null
 * @param costFactory Factory for relational-expression costs
 */
public HepPlanner(
    HepProgram program,
    Context context,
    boolean noDag,
    Function2<RelNode, RelNode, Void> onCopyHook,
    RelOptCostFactory costFactory) {
  super(costFactory, context);
  this.mainProgram = program;
  // Substitute a no-op hook so onCopy() never needs a null check.
  this.onCopyHook = Util.first(onCopyHook, Functions.ignore2());
  this.noDag = noDag;
}
//~ Methods ----------------------------------------------------------------

// implement RelOptPlanner
public void setRoot(RelNode rel) {
  // Import the rel tree into the planner's vertex graph; the returned
  // vertex becomes the root of the DAG.
  root = addRelToGraph(rel);
  dumpGraph();
}

// implement RelOptPlanner
public RelNode getRoot() {
  return root;
}

/** Returns an immutable snapshot of the rules registered with this planner. */
public List<RelOptRule> getRules() {
  return ImmutableList.copyOf(allRules);
}

// implement RelOptPlanner
public boolean addRule(RelOptRule rule) {
  boolean added = allRules.add(rule);
  if (added) {
    // register the rule's description for later lookup by name
    mapRuleDescription(rule);
  }
  return added;
}

@Override public void clear() {
  super.clear();
  // Iterate over a copy because removeRule() mutates allRules.
  for (RelOptRule rule : ImmutableList.copyOf(allRules)) {
    removeRule(rule);
  }
  this.materializations.clear();
}

/** Removes a rule and its description mapping; returns whether it was present. */
public boolean removeRule(RelOptRule rule) {
  unmapRuleDescription(rule);
  return allRules.remove(rule);
}

// implement RelOptPlanner
public RelNode changeTraits(RelNode rel, RelTraitSet toTraits) {
  // Ignore traits, except for the root, where we remember
  // what the final conversion should be.
  if ((rel == root) || (rel == root.getCurrentRel())) {
    requestedRootTraits = toTraits;
  }
  return rel;
}
// implement RelOptPlanner
public RelNode findBestExp() {
  assert root != null;

  executeProgram(mainProgram);

  // Get rid of everything except what's in the final plan.
  collectGarbage();

  return buildFinalPlan(root);
}

/**
 * Runs every instruction of the given program, periodically collecting
 * garbage, then restores the previously active program (programs may nest
 * via subprogram instructions).
 */
private void executeProgram(HepProgram program) {
  HepProgram savedProgram = currentProgram;
  currentProgram = program;
  currentProgram.initialize(program == mainProgram);
  for (HepInstruction instruction : currentProgram.instructions) {
    instruction.execute(this);
    int delta = nTransformations - nTransformationsLastGC;
    if (delta > graphSizeLastGC) {
      // The number of transformations performed since the last
      // garbage collection is greater than the number of vertices in
      // the graph at that time. That means there should be a
      // reasonable amount of garbage to collect now. We do it this
      // way to amortize garbage collection cost over multiple
      // instructions, while keeping the highwater memory usage
      // proportional to the graph size.
      collectGarbage();
    }
  }
  currentProgram = savedProgram;
}
/** Sets the maximum number of rule matches for subsequent instructions. */
void executeInstruction(
    HepInstruction.MatchLimit instruction) {
  LOGGER.trace("Setting match limit to {}", instruction.limit);
  currentProgram.matchLimit = instruction.limit;
}

/** Sets the graph traversal order used when matching rules. */
void executeInstruction(
    HepInstruction.MatchOrder instruction) {
  LOGGER.trace("Setting match order to {}", instruction.order);
  currentProgram.matchOrder = instruction.order;
}

/** Applies a single rule, resolving it by description on first use. */
void executeInstruction(
    HepInstruction.RuleInstance instruction) {
  if (skippingGroup()) {
    return;
  }
  if (instruction.rule == null) {
    // lazily resolve the rule from its textual description
    assert instruction.ruleDescription != null;
    instruction.rule =
        getRuleByDescription(instruction.ruleDescription);
    LOGGER.trace("Looking up rule with description {}, found {}",
        instruction.ruleDescription, instruction.rule);
  }
  if (instruction.rule != null) {
    applyRules(
        Collections.singleton(instruction.rule),
        true);
  }
}

/** Applies every registered rule that is an instance of the instruction's
 * rule class; the matching set is computed lazily and cached. */
void executeInstruction(
    HepInstruction.RuleClass<?> instruction) {
  if (skippingGroup()) {
    return;
  }
  LOGGER.trace("Applying rule class {}", instruction.ruleClass);
  if (instruction.ruleSet == null) {
    instruction.ruleSet = new LinkedHashSet<>();
    for (RelOptRule rule : allRules) {
      if (instruction.ruleClass.isInstance(rule)) {
        instruction.ruleSet.add(rule);
      }
    }
  }
  applyRules(instruction.ruleSet, true);
}

/** Applies an explicit collection of rules. */
void executeInstruction(
    HepInstruction.RuleCollection instruction) {
  if (skippingGroup()) {
    return;
  }
  applyRules(instruction.rules, true);
}
/**
 * Returns whether rule-application instructions should be skipped because
 * the planner is inside a group whose rules have already been collected.
 */
private boolean skippingGroup() {
  // While a group is collecting, rules are gathered rather than applied;
  // outside any group, nothing is skipped.
  return currentProgram.group != null && !currentProgram.group.collecting;
}
/**
 * Lazily gathers all registered {@link ConverterRule}s whose guaranteed
 * flag matches the instruction (adding trait-matching companions for
 * non-guaranteed converters) and applies them.
 */
void executeInstruction(
    HepInstruction.ConverterRules instruction) {
  assert currentProgram.group == null;
  if (instruction.ruleSet == null) {
    instruction.ruleSet = new LinkedHashSet<>();
    for (RelOptRule rule : allRules) {
      if (!(rule instanceof ConverterRule)) {
        continue;
      }
      ConverterRule converter = (ConverterRule) rule;
      if (converter.isGuaranteed() != instruction.guaranteed) {
        continue;
      }

      // Add the rule itself to work top-down
      instruction.ruleSet.add(converter);
      if (!instruction.guaranteed) {
        // Add a TraitMatchingRule to work bottom-up
        instruction.ruleSet.add(
            new TraitMatchingRule(converter, RelFactories.LOGICAL_BUILDER));
      }
    }
  }
  applyRules(instruction.ruleSet, instruction.guaranteed);
}

/** Lazily gathers all registered {@link CommonRelSubExprRule}s and applies
 * them (they only fire on common sub-expressions; see applyRule). */
void executeInstruction(HepInstruction.CommonRelSubExprRules instruction) {
  assert currentProgram.group == null;
  if (instruction.ruleSet == null) {
    instruction.ruleSet = new LinkedHashSet<>();
    for (RelOptRule rule : allRules) {
      if (!(rule instanceof CommonRelSubExprRule)) {
        continue;
      }
      instruction.ruleSet.add(rule);
    }
  }
  applyRules(instruction.ruleSet, true);
}
/**
 * Repeatedly executes a subprogram until it reaches a fixpoint, i.e. an
 * iteration that performs no transformations.
 */
void executeInstruction(
    HepInstruction.Subprogram instruction) {
  LOGGER.trace("Entering subprogram");
  int transformationsBefore;
  do {
    transformationsBefore = nTransformations;
    executeProgram(instruction.subprogram);
  } while (nTransformations != transformationsBefore);
  LOGGER.trace("Leaving subprogram");
}
/** Enters a group: subsequent rule instructions are collected into the
 * group's rule set instead of being applied immediately. */
void executeInstruction(
    HepInstruction.BeginGroup instruction) {
  assert currentProgram.group == null;
  currentProgram.group = instruction.endGroup;
  LOGGER.trace("Entering group");
}

/** Leaves a group and applies all rules collected since the matching
 * BeginGroup as a single set. */
void executeInstruction(
    HepInstruction.EndGroup instruction) {
  assert currentProgram.group == instruction;
  currentProgram.group = null;
  instruction.collecting = false;
  applyRules(instruction.ruleSet, true);
  LOGGER.trace("Leaving group");
}
/**
 * Applies the given rules depth-first from the iterator's current position,
 * restarting traversal from each newly created vertex.
 *
 * @return the updated number of rule matches so far
 */
private int depthFirstApply(Iterator<HepRelVertex> iter,
    Collection<RelOptRule> rules,
    boolean forceConversions, int nMatches) {
  while (iter.hasNext()) {
    HepRelVertex vertex = iter.next();
    for (RelOptRule rule : rules) {
      HepRelVertex newVertex =
          applyRule(rule, vertex, forceConversions);
      if (newVertex == null || newVertex == vertex) {
        continue;
      }
      ++nMatches;
      if (nMatches >= currentProgram.matchLimit) {
        return nMatches;
      }
      // To the extent possible, pick up where we left
      // off; have to create a new iterator because old
      // one was invalidated by transformation.
      Iterator<HepRelVertex> depthIter = getGraphIterator(newVertex);
      nMatches = depthFirstApply(depthIter, rules, forceConversions,
          nMatches);
      break;
    }
  }
  return nMatches;
}

/**
 * Applies a set of rules to the graph until a fixpoint or the program's
 * match limit is reached. When inside a collecting group, merely records
 * the rules for later application by the matching EndGroup.
 */
private void applyRules(
    Collection<RelOptRule> rules,
    boolean forceConversions) {
  if (currentProgram.group != null) {
    assert currentProgram.group.collecting;
    currentProgram.group.ruleSet.addAll(rules);
    return;
  }

  LOGGER.trace("Applying rule set {}", rules);

  // Ordered traversals (e.g. TOP_DOWN/BOTTOM_UP) are invalidated by any
  // transformation, so they restart from the root; ARBITRARY/DEPTH_FIRST
  // can resume from the new vertex.
  boolean fullRestartAfterTransformation =
      currentProgram.matchOrder != HepMatchOrder.ARBITRARY
          && currentProgram.matchOrder != HepMatchOrder.DEPTH_FIRST;

  int nMatches = 0;

  boolean fixedPoint;
  do {
    Iterator<HepRelVertex> iter = getGraphIterator(root);
    fixedPoint = true;
    while (iter.hasNext()) {
      HepRelVertex vertex = iter.next();
      for (RelOptRule rule : rules) {
        HepRelVertex newVertex =
            applyRule(rule, vertex, forceConversions);
        if (newVertex == null || newVertex == vertex) {
          continue;
        }
        ++nMatches;
        if (nMatches >= currentProgram.matchLimit) {
          return;
        }
        if (fullRestartAfterTransformation) {
          iter = getGraphIterator(root);
        } else {
          // To the extent possible, pick up where we left
          // off; have to create a new iterator because old
          // one was invalidated by transformation.
          iter = getGraphIterator(newVertex);
          if (currentProgram.matchOrder == HepMatchOrder.DEPTH_FIRST) {
            nMatches =
                depthFirstApply(iter, rules, forceConversions, nMatches);
            if (nMatches >= currentProgram.matchLimit) {
              return;
            }
          }
          // Remember to go around again since we're
          // skipping some stuff.
          fixedPoint = false;
        }
        break;
      }
    }
  } while (!fixedPoint);
}
/**
 * Returns an iterator over the graph's vertices in the current program's
 * match order, starting (where the order allows) from {@code start}.
 */
private Iterator<HepRelVertex> getGraphIterator(HepRelVertex start) {
  // Make sure there's no garbage, because topological sort
  // doesn't start from a specific root, and rules can't
  // deal with firing on garbage.

  // FIXME jvs 25-Sept-2006: I had to move this earlier because
  // of FRG-215, which is still under investigation. Once we
  // figure that one out, move down to location below for
  // better optimizer performance.
  collectGarbage();

  switch (currentProgram.matchOrder) {
  case ARBITRARY:
  case DEPTH_FIRST:
    return DepthFirstIterator.of(graph, start).iterator();

  case TOP_DOWN:
    assert start == root;
    // see above
    /*
    collectGarbage();
    */
    return TopologicalOrderIterator.of(graph).iterator();

  case BOTTOM_UP:
  default:
    assert start == root;

    // see above
    /*
    collectGarbage();
    */

    // TODO jvs 4-Apr-2006: enhance TopologicalOrderIterator
    // to support reverse walk.
    final List<HepRelVertex> list = new ArrayList<>();
    for (HepRelVertex vertex : TopologicalOrderIterator.of(graph)) {
      list.add(vertex);
    }
    Collections.reverse(list);
    return list.iterator();
  }
}
/** Returns whether the vertex is still part of the DAG, i.e. its current
 * rel's (digest, row type) key is registered in the digest map. */
private boolean belongsToDag(HepRelVertex vertex) {
  return mapDigestToVertex.containsKey(key(vertex.getCurrentRel()));
}
/**
 * Attempts to apply a single rule at the given vertex.
 *
 * @return the vertex holding the chosen transformation result, or null if
 * the rule did not fire or produced no results
 */
private HepRelVertex applyRule(
    RelOptRule rule,
    HepRelVertex vertex,
    boolean forceConversions) {
  // Skip vertices that have already been replaced or garbage-collected.
  if (!belongsToDag(vertex)) {
    return null;
  }
  RelTrait parentTrait = null;
  List<RelNode> parents = null;
  if (rule instanceof ConverterRule) {
    // Guaranteed converter rules require special casing to make sure
    // they only fire where actually needed, otherwise they tend to
    // fire to infinity and beyond.
    ConverterRule converterRule = (ConverterRule) rule;
    if (converterRule.isGuaranteed() || !forceConversions) {
      if (!doesConverterApply(converterRule, vertex)) {
        return null;
      }
      parentTrait = converterRule.getOutTrait();
    }
  } else if (rule instanceof CommonRelSubExprRule) {
    // Only fire CommonRelSubExprRules if the vertex is a common
    // subexpression.
    List<HepRelVertex> parentVertices = getVertexParents(vertex);
    if (parentVertices.size() < 2) {
      return null;
    }
    parents = new ArrayList<>();
    for (HepRelVertex pVertex : parentVertices) {
      parents.add(pVertex.getCurrentRel());
    }
  }

  // Match the rule's operand tree against the rel tree rooted at vertex.
  final List<RelNode> bindings = new ArrayList<>();
  final Map<RelNode, List<RelNode>> nodeChildren = new HashMap<>();
  boolean match =
      matchOperands(
          rule.getOperand(),
          vertex.getCurrentRel(),
          bindings,
          nodeChildren);

  if (!match) {
    return null;
  }

  HepRuleCall call =
      new HepRuleCall(
          this,
          rule.getOperand(),
          bindings.toArray(new RelNode[0]),
          nodeChildren,
          parents);

  // Allow the rule to apply its own side-conditions.
  if (!rule.matches(call)) {
    return null;
  }

  fireRule(call);

  if (!call.getResults().isEmpty()) {
    return applyTransformationResults(
        vertex,
        call,
        parentTrait);
  }

  return null;
}

/**
 * Returns whether a converter rule is useful at this vertex: some
 * non-converter parent — or the requested root traits, when the vertex is
 * the root — wants the trait the converter produces.
 */
private boolean doesConverterApply(
    ConverterRule converterRule,
    HepRelVertex vertex) {
  RelTrait outTrait = converterRule.getOutTrait();
  List<HepRelVertex> parents = Graphs.predecessorListOf(graph, vertex);
  for (HepRelVertex parent : parents) {
    RelNode parentRel = parent.getCurrentRel();
    if (parentRel instanceof Converter) {
      // We don't support converter chains.
      continue;
    }
    if (parentRel.getTraitSet().contains(outTrait)) {
      // This parent wants the traits produced by the converter.
      return true;
    }
  }
  return (vertex == root)
      && (requestedRootTraits != null)
      && requestedRootTraits.contains(outTrait);
}
/**
 * Retrieves the parent vertices of a vertex. If a vertex appears multiple
 * times as an input into a parent, then that counts as multiple parents,
 * one per input reference.
 *
 * @param vertex the vertex
 * @return the list of parents for the vertex
 */
private List<HepRelVertex> getVertexParents(HepRelVertex vertex) {
  final List<HepRelVertex> result = new ArrayList<>();
  for (HepRelVertex candidate : Graphs.predecessorListOf(graph, vertex)) {
    RelNode parentRel = candidate.getCurrentRel();
    // Count one parent entry per input slot that references the vertex.
    for (RelNode input : parentRel.getInputs()) {
      if (input == vertex) {
        result.add(candidate);
      }
    }
  }
  return result;
}
/**
 * Recursively matches a rule operand tree against a rel tree, recording
 * matched rels in {@code bindings} and, for UNORDERED operands, the
 * children of each matched rel in {@code nodeChildren}.
 *
 * @return whether the operand tree matched
 */
private boolean matchOperands(
    RelOptRuleOperand operand,
    RelNode rel,
    List<RelNode> bindings,
    Map<RelNode, List<RelNode>> nodeChildren) {
  if (!operand.matches(rel)) {
    return false;
  }
  bindings.add(rel);
  @SuppressWarnings("unchecked")
  List<HepRelVertex> childRels = (List) rel.getInputs();
  switch (operand.childPolicy) {
  case ANY:
    return true;
  case UNORDERED:
    // For each operand, at least one child must match. If
    // matchAnyChildren, usually there's just one operand.
    for (RelOptRuleOperand childOperand : operand.getChildOperands()) {
      boolean match = false;
      for (HepRelVertex childRel : childRels) {
        match =
            matchOperands(
                childOperand,
                childRel.getCurrentRel(),
                bindings,
                nodeChildren);
        if (match) {
          break;
        }
      }
      if (!match) {
        return false;
      }
    }
    // Record all children so the rule call can bind any of them.
    final List<RelNode> children = new ArrayList<>(childRels.size());
    for (HepRelVertex childRel : childRels) {
      children.add(childRel.getCurrentRel());
    }
    nodeChildren.put(rel, children);
    return true;
  default:
    // Ordered policy: child operands are matched positionally.
    int n = operand.getChildOperands().size();
    if (childRels.size() < n) {
      return false;
    }
    for (Pair<HepRelVertex, RelOptRuleOperand> pair
        : Pair.zip(childRels, operand.getChildOperands())) {
      boolean match =
          matchOperands(
              pair.right,
              pair.left.getCurrentRel(),
              bindings,
              nodeChildren);
      if (!match) {
        return false;
      }
    }
    return true;
  }
}
/**
 * Installs the best of a rule call's results into the graph, replacing the
 * matched vertex for the appropriate parents.
 *
 * @param vertex      vertex that was matched by the rule
 * @param call        fired rule call holding one or more results
 * @param parentTrait for converter rules, the produced trait used to filter
 *                    which parents receive the replacement; otherwise null
 * @return the vertex now holding the transformation result
 */
private HepRelVertex applyTransformationResults(
    HepRelVertex vertex,
    HepRuleCall call,
    RelTrait parentTrait) {
  // TODO jvs 5-Apr-2006: Take the one that gives the best
  // global cost rather than the best local cost. That requires
  // "tentative" graph edits.

  assert !call.getResults().isEmpty();

  RelNode bestRel = null;

  if (call.getResults().size() == 1) {
    // No costing required; skip it to minimize the chance of hitting
    // rels without cost information.
    bestRel = call.getResults().get(0);
  } else {
    // Multiple alternatives: pick the one with the lowest cumulative cost.
    RelOptCost bestCost = null;
    final RelMetadataQuery mq = call.getMetadataQuery();
    for (RelNode rel : call.getResults()) {
      RelOptCost thisCost = getCost(rel, mq);
      if (LOGGER.isTraceEnabled()) {
        // Keep in the isTraceEnabled for the getRowCount method call
        LOGGER.trace("considering {} with cumulative cost={} and rowcount={}",
            rel, thisCost, mq.getRowCount(rel));
      }
      if ((bestRel == null) || thisCost.isLt(bestCost)) {
        bestRel = rel;
        bestCost = thisCost;
      }
    }
  }

  ++nTransformations;
  notifyTransformation(
      call,
      bestRel,
      true);

  // Before we add the result, make a copy of the list of vertex's
  // parents. We'll need this later during contraction so that
  // we only update the existing parents, not the new parents
  // (otherwise loops can result). Also take care of filtering
  // out parents by traits in case we're dealing with a converter rule.
  final List<HepRelVertex> allParents =
      Graphs.predecessorListOf(graph, vertex);
  final List<HepRelVertex> parents = new ArrayList<>();
  for (HepRelVertex parent : allParents) {
    if (parentTrait != null) {
      RelNode parentRel = parent.getCurrentRel();
      if (parentRel instanceof Converter) {
        // We don't support automatically chaining conversions.
        // Treating a converter as a candidate parent here
        // can cause the "iParentMatch" check below to
        // throw away a new converter needed in
        // the multi-parent DAG case.
        continue;
      }
      if (!parentRel.getTraitSet().contains(parentTrait)) {
        // This parent does not want the converted result.
        continue;
      }
    }
    parents.add(parent);
  }

  HepRelVertex newVertex = addRelToGraph(bestRel);

  // There's a chance that newVertex is the same as one
  // of the parents due to common subexpression recognition
  // (e.g. the LogicalProject added by JoinCommuteRule). In that
  // case, treat the transformation as a nop to avoid
  // creating a loop.
  int iParentMatch = parents.indexOf(newVertex);
  if (iParentMatch != -1) {
    newVertex = parents.get(iParentMatch);
  } else {
    contractVertices(newVertex, vertex, parents);
  }

  if (getListener() != null) {
    // Assume listener doesn't want to see garbage.
    collectGarbage();
  }

  notifyTransformation(
      call,
      bestRel,
      false);

  dumpGraph();

  return newVertex;
}
// implement RelOptPlanner
public RelNode register(
    RelNode rel,
    RelNode equivRel) {
  // Ignore; this call is mostly to tell Volcano how to avoid
  // infinite loops.
  return rel;
}

@Override public void onCopy(RelNode rel, RelNode newRel) {
  // Forward to the user-supplied hook (never null; see constructor).
  onCopyHook.apply(rel, newRel);
}

// implement RelOptPlanner
public RelNode ensureRegistered(RelNode rel, RelNode equivRel) {
  // Hep has no registration concept; every rel is implicitly registered.
  return rel;
}

// implement RelOptPlanner
public boolean isRegistered(RelNode rel) {
  return true;
}
/**
 * Recursively adds a rel tree to the graph, returning the vertex that
 * represents its root. When DAG mode is enabled, an equivalent existing
 * vertex (same digest and row type) is reused instead of creating a new one.
 */
private HepRelVertex addRelToGraph(
    RelNode rel) {
  // Check if a transformation already produced a reference
  // to an existing vertex.
  if (graph.vertexSet().contains(rel)) {
    return (HepRelVertex) rel;
  }

  // Recursively add children, replacing this rel's inputs
  // with corresponding child vertices.
  final List<RelNode> inputs = rel.getInputs();
  final List<RelNode> newInputs = new ArrayList<>();
  for (RelNode input1 : inputs) {
    HepRelVertex childVertex = addRelToGraph(input1);
    newInputs.add(childVertex);
  }

  if (!Util.equalShallow(inputs, newInputs)) {
    // Some input was replaced by a vertex: copy the rel with new inputs.
    RelNode oldRel = rel;
    rel = rel.copy(rel.getTraitSet(), newInputs);
    onCopy(oldRel, rel);
  }
  // Compute digest first time we add to DAG,
  // otherwise can't get equivVertex for common sub-expression
  rel.recomputeDigest();

  // try to find equivalent rel only if DAG is allowed
  if (!noDag) {
    // Now, check if an equivalent vertex already exists in graph.
    Pair<String, RelDataType> key = key(rel);
    HepRelVertex equivVertex = mapDigestToVertex.get(key);
    if (equivVertex != null) {
      // Use existing vertex.
      return equivVertex;
    }
  }

  // No equivalence: create a new vertex to represent this rel.
  HepRelVertex newVertex = new HepRelVertex(rel);
  graph.addVertex(newVertex);
  updateVertex(newVertex, rel);

  for (RelNode input : rel.getInputs()) {
    graph.addEdge(newVertex, (HepRelVertex) input);
  }

  nTransformations++;
  return newVertex;
}
private void contractVertices(
HepRelVertex preservedVertex,
HepRelVertex discardedVertex,
List<HepRelVertex> parents) {
if (preservedVertex == discardedVertex) {
// Nop.
return;
}
RelNode rel = preservedVertex.getCurrentRel();
updateVertex(preservedVertex, rel);
// Update specified parents of discardedVertex.
for (HepRelVertex parent : parents) {
RelNode parentRel = parent.getCurrentRel();
List<RelNode> inputs = parentRel.getInputs();
for (int i = 0; i < inputs.size(); ++i) {
RelNode child = inputs.get(i);
if (child != discardedVertex) {
continue;
}
parentRel.replaceInput(i, preservedVertex);
}
RelMdUtil.clearCache(parentRel);
graph.removeEdge(parent, discardedVertex);
graph.addEdge(parent, preservedVertex);
updateVertex(parent, parentRel);
}
// NOTE: we don't actually do graph.removeVertex(discardedVertex),
// because it might still be reachable from preservedVertex.
// Leave that job for garbage collection.
if (discardedVertex == root) {
root = preservedVertex;
}
}
/**
 * Associates {@code rel} with {@code vertex}, keeping the digest-to-vertex
 * map consistent and firing discard/equivalence notifications.
 *
 * <p>Statement order matters here: the old digest entry must be removed
 * (only if it still points at this vertex) before the new one is installed,
 * and the vertex's rel is swapped only after the map has been updated.
 *
 * @param vertex  vertex to update
 * @param rel     relational expression the vertex should now represent
 */
private void updateVertex(HepRelVertex vertex, RelNode rel) {
  if (rel != vertex.getCurrentRel()) {
    // REVIEW jvs 5-Apr-2006: We'll do this again later
    // during garbage collection. Or we could get rid
    // of mark/sweep garbage collection and do it precisely
    // at this point by walking down to all rels which are only
    // reachable from here.
    notifyDiscard(vertex.getCurrentRel());
  }
  Pair<String, RelDataType> oldKey = key(vertex.getCurrentRel());
  if (mapDigestToVertex.get(oldKey) == vertex) {
    mapDigestToVertex.remove(oldKey);
  }
  // When a transformation happened in one rule apply, support
  // vertex2 replace vertex1, but the current relNode of
  // vertex1 and vertex2 is same,
  // then the digest is also same. but we can't remove vertex2,
  // otherwise the digest will be removed wrongly in the mapDigestToVertex
  // when collectGC
  // so it must update the digest that map to vertex
  Pair<String, RelDataType> newKey = key(rel);
  mapDigestToVertex.put(newKey, vertex);
  if (rel != vertex.getCurrentRel()) {
    vertex.replaceRel(rel);
  }
  notifyEquivalence(
      rel,
      vertex,
      false);
}
/**
 * Returns the (digest, row type) pair used to identify equivalent vertices
 * in {@code mapDigestToVertex}. Row type is included because two rels with
 * equal digests but different row types are not interchangeable.
 */
private static Pair<String, RelDataType> key(RelNode rel) {
  return Pair.of(rel.getDigest(), rel.getRowType());
}
/**
 * Converts the vertex DAG rooted at {@code vertex} back into a plain
 * {@link RelNode} tree by recursively replacing every {@link HepRelVertex}
 * input with the rel it wraps.
 *
 * @param vertex  root of the sub-DAG to convert
 * @return the rel tree chosen as the final plan for this sub-DAG
 */
private RelNode buildFinalPlan(HepRelVertex vertex) {
  final RelNode result = vertex.getCurrentRel();
  notifyChosen(result);
  // Descend into each input, unwrapping vertices bottom-up.
  final List<RelNode> inputs = result.getInputs();
  for (int i = 0; i < inputs.size(); ++i) {
    final RelNode input = inputs.get(i);
    if (input instanceof HepRelVertex) {
      result.replaceInput(i, buildFinalPlan((HepRelVertex) input));
    }
    // Non-vertex inputs were already replaced on an earlier visit.
  }
  // Inputs changed, so cached metadata and the digest are stale.
  RelMdUtil.clearCache(result);
  result.recomputeDigest();
  return result;
}
/**
 * Removes vertices (and notifies discard of their rels) that are no longer
 * reachable from the planner root, using a mark-and-sweep pass, and prunes
 * the digest map of entries pointing at swept vertices.
 */
private void collectGarbage() {
  // Garbage can only exist if a transformation ran since the last GC.
  if (nTransformations == nTransformationsLastGC) {
    return;
  }
  nTransformationsLastGC = nTransformations;
  LOGGER.trace("collecting garbage");

  // Mark phase: everything reachable from the root survives.
  final Set<HepRelVertex> reachable = new HashSet<>();
  if (graph.vertexSet().contains(root)) {
    BreadthFirstIterator.reachable(reachable, graph, root);
  }
  if (reachable.size() == graph.vertexSet().size()) {
    // All vertices are live; nothing to sweep.
    return;
  }

  // Sweep phase: gather the unreachable vertices and notify listeners.
  final Set<HepRelVertex> garbage = new HashSet<>();
  for (HepRelVertex vertex : graph.vertexSet()) {
    if (!reachable.contains(vertex)) {
      garbage.add(vertex);
      notifyDiscard(vertex.getCurrentRel());
    }
  }
  assert !garbage.isEmpty();
  graph.removeAllVertices(garbage);
  graphSizeLastGC = graph.vertexSet().size();

  // Drop digest-map entries that still reference swept vertices.
  mapDigestToVertex.entrySet().removeIf(entry -> garbage.contains(entry.getValue()));
}
/**
 * Verifies that the planning graph is acyclic, throwing
 * {@link AssertionError} (listing the offending vertices) if a cycle is
 * found.
 */
private void assertNoCycles() {
  final Set<HepRelVertex> cyclicVertices =
      new CycleDetector<HepRelVertex, DefaultEdge>(graph).findCycles();
  if (!cyclicVertices.isEmpty()) {
    throw new AssertionError(
        "Query graph cycle detected in HepPlanner: " + cyclicVertices);
  }
}
/**
 * Dumps the current planning graph to the trace log: one line per vertex in
 * breadth-first order from the root, with its current rel, estimated row
 * count and cumulative cost. No-op unless TRACE logging is enabled.
 */
private void dumpGraph() {
  if (!LOGGER.isTraceEnabled()) {
    return;
  }
  // A cycle would make the breadth-first walk below misleading.
  assertNoCycles();
  final RelMetadataQuery mq = root.getCluster().getMetadataQuery();
  final StringBuilder sb = new StringBuilder();
  sb.append("\nBreadth-first from root: {\n");
  for (HepRelVertex vertex : BreadthFirstIterator.of(graph, root)) {
    sb.append("    ")
        .append(vertex)
        .append(" = ");
    RelNode rel = vertex.getCurrentRel();
    sb.append(rel)
        .append(", rowcount=")
        .append(mq.getRowCount(rel))
        .append(", cumulative cost=")
        .append(getCost(rel, mq))
        .append('\n');
  }
  sb.append("}");
  LOGGER.trace(sb.toString());
}
// implement RelOptPlanner
public void registerMetadataProviders(List<RelMetadataProvider> list) {
  // Prepend so the hep-specific provider takes priority over the defaults.
  list.add(0, new HepRelMetadataProvider());
}

// implement RelOptPlanner
public long getRelMetadataTimestamp(RelNode rel) {
  // TODO jvs 20-Apr-2006: This is overly conservative. Better would be
  // to keep a timestamp per HepRelVertex, and update only affected
  // vertices and all ancestors on each transformation.
  return nTransformations;
}
/** Returns an immutable snapshot of the registered materializations. */
@Override public ImmutableList<RelOptMaterialization> getMaterializations() {
  return ImmutableList.copyOf(materializations);
}

/** Registers a materialization for consideration during planning. */
@Override public void addMaterialization(RelOptMaterialization materialization) {
  materializations.add(materialization);
}
}
// End HepPlanner.java
|
|
/*
* Copyright (c) 1997, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.x509;
import java.io.IOException;
import java.io.OutputStream;
import java.security.cert.CRLReason;
import java.util.Enumeration;
import sun.security.util.*;
/**
* The reasonCode is a non-critical CRL entry extension that identifies
* the reason for the certificate revocation. CAs are strongly
* encouraged to include reason codes in CRL entries; however, the
* reason code CRL entry extension should be absent instead of using the
* unspecified (0) reasonCode value.
* <p>The ASN.1 syntax for this is:
* <pre>
* id-ce-cRLReason OBJECT IDENTIFIER ::= { id-ce 21 }
*
* -- reasonCode ::= { CRLReason }
*
* CRLReason ::= ENUMERATED {
* unspecified (0),
* keyCompromise (1),
* cACompromise (2),
* affiliationChanged (3),
* superseded (4),
* cessationOfOperation (5),
* certificateHold (6),
* removeFromCRL (8),
* privilegeWithdrawn (9),
* aACompromise (10) }
* </pre>
* @author Hemma Prafullchandra
* @see Extension
* @see CertAttrSet
*/
/**
 * The reasonCode is a non-critical CRL entry extension that identifies
 * the reason for the certificate revocation. CAs are strongly
 * encouraged to include reason codes in CRL entries; however, the
 * reason code CRL entry extension should be absent instead of using the
 * unspecified (0) reasonCode value.
 * <p>The ASN.1 syntax for this is:
 * <pre>
 *  id-ce-cRLReason OBJECT IDENTIFIER ::= { id-ce 21 }
 *
 *  -- reasonCode ::= { CRLReason }
 *
 *  CRLReason ::= ENUMERATED {
 *     unspecified             (0),
 *     keyCompromise           (1),
 *     cACompromise            (2),
 *     affiliationChanged      (3),
 *     superseded              (4),
 *     cessationOfOperation    (5),
 *     certificateHold         (6),
 *     removeFromCRL           (8),
 *     privilegeWithdrawn      (9),
 *     aACompromise           (10) }
 * </pre>
 * @author Hemma Prafullchandra
 * @see Extension
 * @see CertAttrSet
 */
public class CRLReasonCodeExtension extends Extension
        implements CertAttrSet<String> {

    /**
     * Attribute name and Reason codes
     */
    public static final String NAME = "CRLReasonCode";
    public static final String REASON = "reason";

    public static final int UNSPECIFIED = 0;
    public static final int KEY_COMPROMISE = 1;
    public static final int CA_COMPROMISE = 2;
    public static final int AFFLIATION_CHANGED = 3;
    public static final int SUPERSEDED = 4;
    public static final int CESSATION_OF_OPERATION = 5;
    public static final int CERTIFICATE_HOLD = 6;
    // note 7 missing in syntax
    public static final int REMOVE_FROM_CRL = 8;
    public static final int PRIVILEGE_WITHDRAWN = 9;
    public static final int AA_COMPROMISE = 10;

    // Cached CRLReason ordinals; CRLReason.values() index matches the
    // numeric reason code (the enum includes UNUSED for the missing 7).
    private static CRLReason[] values = CRLReason.values();

    // Current reason code; 0 (unspecified) means the extension is absent.
    private int reasonCode = 0;

    /**
     * DER-encodes the current reason code into {@code extensionValue}.
     * A code of 0 clears the encoded value, since the extension should be
     * absent rather than explicitly "unspecified".
     */
    private void encodeThis() throws IOException {
        if (reasonCode == 0) {
            this.extensionValue = null;
            return;
        }
        DerOutputStream dos = new DerOutputStream();
        dos.putEnumerated(reasonCode);
        this.extensionValue = dos.toByteArray();
    }

    /**
     * Create a CRLReasonCodeExtension with the passed in reason.
     * Criticality automatically set to false.
     *
     * @param reason the enumerated value for the reason code.
     */
    public CRLReasonCodeExtension(int reason) throws IOException {
        this(false, reason);
    }

    /**
     * Create a CRLReasonCodeExtension with the passed in reason.
     *
     * @param critical true if the extension is to be treated as critical.
     * @param reason the enumerated value for the reason code.
     */
    public CRLReasonCodeExtension(boolean critical, int reason)
            throws IOException {
        this.extensionId = PKIXExtensions.ReasonCode_Id;
        this.critical = critical;
        this.reasonCode = reason;
        encodeThis();
    }

    /**
     * Create the extension from the passed DER encoded value of the same.
     *
     * @param critical true if the extension is to be treated as critical.
     * @param value an array of DER encoded bytes of the actual value.
     * @exception ClassCastException if value is not an array of bytes
     * @exception IOException on error.
     */
    public CRLReasonCodeExtension(Boolean critical, Object value)
            throws IOException {
        this.extensionId = PKIXExtensions.ReasonCode_Id;
        this.critical = critical.booleanValue();
        this.extensionValue = (byte[]) value;
        DerValue val = new DerValue(this.extensionValue);
        this.reasonCode = val.getEnumerated();
    }

    /**
     * Set the attribute value.
     *
     * @param name attribute name; only {@link #REASON} is supported
     * @param obj new value; must be an Integer
     * @throws IOException if the name or value type is unsupported
     */
    public void set(String name, Object obj) throws IOException {
        if (!(obj instanceof Integer)) {
            throw new IOException("Attribute must be of type Integer.");
        }
        if (name.equalsIgnoreCase(REASON)) {
            reasonCode = ((Integer)obj).intValue();
        } else {
            throw new IOException
                ("Name not supported by CRLReasonCodeExtension");
        }
        encodeThis();
    }

    /**
     * Get the attribute value.
     *
     * @param name attribute name; only {@link #REASON} is supported
     * @throws IOException if the name is unsupported
     */
    public Object get(String name) throws IOException {
        if (name.equalsIgnoreCase(REASON)) {
            // Integer.valueOf instead of the deprecated Integer constructor.
            return Integer.valueOf(reasonCode);
        } else {
            throw new IOException
                ("Name not supported by CRLReasonCodeExtension");
        }
    }

    /**
     * Delete the attribute value.
     *
     * @param name attribute name; only {@link #REASON} is supported
     * @throws IOException if the name is unsupported
     */
    public void delete(String name) throws IOException {
        if (name.equalsIgnoreCase(REASON)) {
            reasonCode = 0;
        } else {
            throw new IOException
                ("Name not supported by CRLReasonCodeExtension");
        }
        encodeThis();
    }

    /**
     * Returns a printable representation of the Reason code.
     */
    public String toString() {
        // Guard the array access: set()/the int constructor accept arbitrary
        // ints, and an out-of-range code previously caused
        // ArrayIndexOutOfBoundsException here.
        String reason = (reasonCode >= 0 && reasonCode < values.length)
                ? values[reasonCode].toString()
                : "unrecognized reason code (" + reasonCode + ")";
        return super.toString() + "    Reason Code: " + reason;
    }

    /**
     * Write the extension to the DerOutputStream.
     *
     * @param out the DerOutputStream to write the extension to.
     * @exception IOException on encoding errors.
     */
    public void encode(OutputStream out) throws IOException {
        DerOutputStream tmp = new DerOutputStream();
        if (this.extensionValue == null) {
            // Lazily (re-)encode with default criticality before writing.
            this.extensionId = PKIXExtensions.ReasonCode_Id;
            this.critical = false;
            encodeThis();
        }
        super.encode(tmp);
        out.write(tmp.toByteArray());
    }

    /**
     * Return an enumeration of names of attributes existing within this
     * attribute.
     */
    public Enumeration<String> getElements() {
        AttributeNameEnumeration elements = new AttributeNameEnumeration();
        elements.addElement(REASON);
        return elements.elements();
    }

    /**
     * Return the name of this attribute.
     */
    public String getName() {
        return NAME;
    }

    /**
     * Return the reason as a CRLReason enum.
     */
    public CRLReason getReasonCode() {
        // if out-of-range, return UNSPECIFIED
        if (reasonCode > 0 && reasonCode < values.length) {
            return values[reasonCode];
        } else {
            return CRLReason.UNSPECIFIED;
        }
    }
}
|
|
/*
* Copyright 2009 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.ld.batch.dataaccess.impl;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
import org.kuali.kfs.module.ld.batch.dataaccess.LedgerPreparedStatementCachingDao;
import org.kuali.kfs.module.ld.businessobject.LaborObject;
import org.kuali.kfs.module.ld.businessobject.LedgerBalance;
import org.kuali.kfs.module.ld.businessobject.LedgerEntry;
import org.kuali.kfs.sys.batch.dataaccess.impl.AbstractPreparedStatementCachingDaoJdbc;
import org.kuali.rice.core.api.util.type.KualiDecimal;
/**
 * JDBC implementation of {@code LedgerPreparedStatementCachingDao} for the
 * labor ledger batch jobs. Each operation looks up its SQL by a string key
 * (operation prefix concatenated with the business-object Class) and binds
 * parameters positionally via the inherited wrapper classes, so the bind
 * indices below must stay in lock-step with the column order of the SQL in
 * the static map.
 */
public class LedgerPreparedStatementCachingDaoJdbc extends AbstractPreparedStatementCachingDaoJdbc implements LedgerPreparedStatementCachingDao {

    // Keyed by operation prefix + business-object Class (relies on
    // Class.toString() producing a stable key); consumed by getSql().
    static final Map<String, String> sql = new HashMap<String, String>();
    static {
        sql.put(RETRIEVE_PREFIX + LaborObject.class, "select finobj_frngslry_cd from LD_LABOR_OBJ_T where univ_fiscal_yr = ? and fin_coa_cd = ? and fin_object_cd = ?");
        sql.put(RETRIEVE_PREFIX + Integer.class, "select max(trn_entr_seq_nbr) from LD_LDGR_ENTR_T where univ_fiscal_yr = ? and fin_coa_cd = ? and account_nbr = ? and sub_acct_nbr = ? and fin_object_cd = ? and fin_sub_obj_cd = ? and fin_balance_typ_cd = ? and fin_obj_typ_cd = ? and univ_fiscal_prd_cd = ? and fdoc_typ_cd = ? and fs_origin_cd = ? and fdoc_nbr = ?");
        sql.put(RETRIEVE_PREFIX + LedgerBalance.class, "select ACLN_ANNL_BAL_AMT, FIN_BEG_BAL_LN_AMT, CONTR_GR_BB_AC_AMT, MO1_ACCT_LN_AMT, MO2_ACCT_LN_AMT, MO3_ACCT_LN_AMT, MO4_ACCT_LN_AMT, MO5_ACCT_LN_AMT, MO6_ACCT_LN_AMT, MO7_ACCT_LN_AMT, MO8_ACCT_LN_AMT, MO9_ACCT_LN_AMT, MO10_ACCT_LN_AMT, MO11_ACCT_LN_AMT, MO12_ACCT_LN_AMT, MO13_ACCT_LN_AMT from LD_LDGR_BAL_T where UNIV_FISCAL_YR = ? and FIN_COA_CD = ? and ACCOUNT_NBR = ? and SUB_ACCT_NBR = ? and FIN_OBJECT_CD = ? and FIN_SUB_OBJ_CD = ? and FIN_BALANCE_TYP_CD = ? and FIN_OBJ_TYP_CD = ? and POSITION_NBR = ? and EMPLID = ?");
        sql.put(INSERT_PREFIX + LedgerBalance.class, "insert into LD_LDGR_BAL_T (UNIV_FISCAL_YR, FIN_COA_CD, ACCOUNT_NBR, SUB_ACCT_NBR, FIN_OBJECT_CD, FIN_SUB_OBJ_CD, FIN_BALANCE_TYP_CD, FIN_OBJ_TYP_CD, POSITION_NBR, EMPLID, OBJ_ID, VER_NBR, ACLN_ANNL_BAL_AMT, FIN_BEG_BAL_LN_AMT, CONTR_GR_BB_AC_AMT, MO1_ACCT_LN_AMT, MO2_ACCT_LN_AMT, MO3_ACCT_LN_AMT, MO4_ACCT_LN_AMT, MO5_ACCT_LN_AMT, MO6_ACCT_LN_AMT, MO7_ACCT_LN_AMT, MO8_ACCT_LN_AMT, MO9_ACCT_LN_AMT, MO10_ACCT_LN_AMT, MO11_ACCT_LN_AMT, MO12_ACCT_LN_AMT, MO13_ACCT_LN_AMT, TIMESTAMP) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
        sql.put(UPDATE_PREFIX + LedgerBalance.class, "update LD_LDGR_BAL_T set ACLN_ANNL_BAL_AMT = ?, FIN_BEG_BAL_LN_AMT = ?, CONTR_GR_BB_AC_AMT = ?, MO1_ACCT_LN_AMT = ?, MO2_ACCT_LN_AMT = ?, MO3_ACCT_LN_AMT = ?, MO4_ACCT_LN_AMT = ?, MO5_ACCT_LN_AMT = ?, MO6_ACCT_LN_AMT = ?, MO7_ACCT_LN_AMT = ?, MO8_ACCT_LN_AMT = ?, MO9_ACCT_LN_AMT = ?, MO10_ACCT_LN_AMT = ?, MO11_ACCT_LN_AMT = ?, MO12_ACCT_LN_AMT = ?, MO13_ACCT_LN_AMT = ?, TIMESTAMP = ? where UNIV_FISCAL_YR = ? and FIN_COA_CD = ? and ACCOUNT_NBR = ? and SUB_ACCT_NBR = ? and FIN_OBJECT_CD = ? and FIN_SUB_OBJ_CD = ? and FIN_BALANCE_TYP_CD = ? and FIN_OBJ_TYP_CD = ? and POSITION_NBR = ? and EMPLID = ?");
        sql.put(INSERT_PREFIX + LedgerEntry.class, "INSERT INTO LD_LDGR_ENTR_T (UNIV_FISCAL_YR, FIN_COA_CD,ACCOUNT_NBR, SUB_ACCT_NBR, FIN_OBJECT_CD, FIN_SUB_OBJ_CD, FIN_BALANCE_TYP_CD, FIN_OBJ_TYP_CD, UNIV_FISCAL_PRD_CD, FDOC_TYP_CD, FS_ORIGIN_CD, FDOC_NBR, TRN_ENTR_SEQ_NBR, OBJ_ID, VER_NBR, POSITION_NBR, PROJECT_CD, TRN_LDGR_ENTR_DESC, TRN_LDGR_ENTR_AMT, TRN_DEBIT_CRDT_CD, TRANSACTION_DT, ORG_DOC_NBR, ORG_REFERENCE_ID, FDOC_REF_TYP_CD, FS_REF_ORIGIN_CD, FDOC_REF_NBR, FDOC_REVERSAL_DT, TRN_ENCUM_UPDT_CD, TRN_POST_DT, PAY_PERIOD_END_DT, TRN_TOTAL_HR, PYRL_DT_FSCL_YR, PYRL_DT_FSCLPRD_CD, EMPLID, EMPL_RCD, ERNCD, PAYGROUP, SAL_ADMIN_PLAN, GRADE, RUN_ID, LL_ORIG_FIN_COA_CD, LL_ORIG_ACCT_NBR, LL_ORIG_SUB_ACCT_NBR, LL_ORIG_FIN_OBJECT_CD, LL_ORIG_FIN_SUB_OBJ_CD, COMPANY, SETID, TIMESTAMP) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
    }

    /** Exposes the SQL map to the caching superclass. */
    @Override
    protected Map<String, String> getSql() {
        return sql;
    }

    /**
     * Retrieves the fringe/salary indicator for a labor object, returning a
     * LaborObject populated from the lookup key plus the single result column.
     */
    @Override
    public LaborObject getLaborObject(final Integer fiscalYear, final String chartCode, final String objectCode) {
        return new RetrievingJdbcWrapper<LaborObject>() {
            @Override
            protected void populateStatement(PreparedStatement preparedStatement) throws SQLException {
                preparedStatement.setInt(1, fiscalYear);
                preparedStatement.setString(2, chartCode);
                preparedStatement.setString(3, objectCode);
            }
            @Override
            protected LaborObject extractResult(ResultSet resultSet) throws SQLException {
                LaborObject laborObject = new LaborObject();
                laborObject.setUniversityFiscalYear(fiscalYear);
                laborObject.setChartOfAccountsCode(chartCode);
                laborObject.setFinancialObjectCode(objectCode);
                laborObject.setFinancialObjectFringeOrSalaryCode(resultSet.getString(1));
                return laborObject;
            }
        }.get(LaborObject.class);
    }

    /**
     * Returns the max transaction-entry sequence number for the ledger entry's
     * full key (12 key fields, bound in the order of the RETRIEVE Integer SQL).
     */
    @Override
    public int getMaxLaborSequenceNumber(final LedgerEntry t) {
        return new RetrievingJdbcWrapper<Integer>() {
            @Override
            protected void populateStatement(PreparedStatement preparedStatement) throws SQLException {
                preparedStatement.setInt(1, t.getUniversityFiscalYear());
                preparedStatement.setString(2, t.getChartOfAccountsCode());
                preparedStatement.setString(3, t.getAccountNumber());
                preparedStatement.setString(4, t.getSubAccountNumber());
                preparedStatement.setString(5, t.getFinancialObjectCode());
                preparedStatement.setString(6, t.getFinancialSubObjectCode());
                preparedStatement.setString(7, t.getFinancialBalanceTypeCode());
                preparedStatement.setString(8, t.getFinancialObjectTypeCode());
                preparedStatement.setString(9, t.getUniversityFiscalPeriodCode());
                preparedStatement.setString(10, t.getFinancialDocumentTypeCode());
                preparedStatement.setString(11, t.getFinancialSystemOriginationCode());
                preparedStatement.setString(12, t.getDocumentNumber());
            }
            @Override
            protected Integer extractResult(ResultSet resultSet) throws SQLException {
                return resultSet.getInt(1);
            }
        }.get(Integer.class);
    }

    /**
     * Retrieves the ledger balance matching the 10-part key of the given
     * prototype; the result copies the key fields from the prototype and the
     * 16 amount columns from the result set, in SQL column order.
     */
    @Override
    public LedgerBalance getLedgerBalance(final LedgerBalance lb) {
        return new RetrievingJdbcWrapper<LedgerBalance>() {
            @Override
            protected void populateStatement(PreparedStatement preparedStatement) throws SQLException {
                preparedStatement.setInt(1, lb.getUniversityFiscalYear());
                preparedStatement.setString(2, lb.getChartOfAccountsCode());
                preparedStatement.setString(3, lb.getAccountNumber());
                preparedStatement.setString(4, lb.getSubAccountNumber());
                preparedStatement.setString(5, lb.getFinancialObjectCode());
                preparedStatement.setString(6, lb.getFinancialSubObjectCode());
                preparedStatement.setString(7, lb.getFinancialBalanceTypeCode());
                preparedStatement.setString(8, lb.getFinancialObjectTypeCode());
                preparedStatement.setString(9, lb.getPositionNumber());
                preparedStatement.setString(10, lb.getEmplid());
            }
            @Override
            protected LedgerBalance extractResult(ResultSet resultSet) throws SQLException {
                LedgerBalance ledgerBalance = new LedgerBalance();
                ledgerBalance.setUniversityFiscalYear(lb.getUniversityFiscalYear());
                ledgerBalance.setChartOfAccountsCode(lb.getChartOfAccountsCode());
                ledgerBalance.setAccountNumber(lb.getAccountNumber());
                ledgerBalance.setSubAccountNumber(lb.getSubAccountNumber());
                ledgerBalance.setFinancialObjectCode(lb.getFinancialObjectCode());
                ledgerBalance.setFinancialSubObjectCode(lb.getFinancialSubObjectCode());
                ledgerBalance.setFinancialBalanceTypeCode(lb.getFinancialBalanceTypeCode());
                ledgerBalance.setFinancialObjectTypeCode(lb.getFinancialObjectTypeCode());
                ledgerBalance.setPositionNumber(lb.getPositionNumber());
                ledgerBalance.setEmplid(lb.getEmplid());
                ledgerBalance.setAccountLineAnnualBalanceAmount(new KualiDecimal(resultSet.getBigDecimal(1)));
                ledgerBalance.setBeginningBalanceLineAmount(new KualiDecimal(resultSet.getBigDecimal(2)));
                ledgerBalance.setContractsGrantsBeginningBalanceAmount(new KualiDecimal(resultSet.getBigDecimal(3)));
                ledgerBalance.setMonth1Amount(new KualiDecimal(resultSet.getBigDecimal(4)));
                ledgerBalance.setMonth2Amount(new KualiDecimal(resultSet.getBigDecimal(5)));
                ledgerBalance.setMonth3Amount(new KualiDecimal(resultSet.getBigDecimal(6)));
                ledgerBalance.setMonth4Amount(new KualiDecimal(resultSet.getBigDecimal(7)));
                ledgerBalance.setMonth5Amount(new KualiDecimal(resultSet.getBigDecimal(8)));
                ledgerBalance.setMonth6Amount(new KualiDecimal(resultSet.getBigDecimal(9)));
                ledgerBalance.setMonth7Amount(new KualiDecimal(resultSet.getBigDecimal(10)));
                ledgerBalance.setMonth8Amount(new KualiDecimal(resultSet.getBigDecimal(11)));
                ledgerBalance.setMonth9Amount(new KualiDecimal(resultSet.getBigDecimal(12)));
                ledgerBalance.setMonth10Amount(new KualiDecimal(resultSet.getBigDecimal(13)));
                ledgerBalance.setMonth11Amount(new KualiDecimal(resultSet.getBigDecimal(14)));
                ledgerBalance.setMonth12Amount(new KualiDecimal(resultSet.getBigDecimal(15)));
                ledgerBalance.setMonth13Amount(new KualiDecimal(resultSet.getBigDecimal(16)));
                return ledgerBalance;
            }
        }.get(LedgerBalance.class);
    }

    /**
     * Inserts a ledger balance row. Generates OBJ_ID (random UUID) and
     * VER_NBR (1) when absent; bind order matches the INSERT column list.
     */
    @Override
    public void insertLedgerBalance(final LedgerBalance ledgerBalance, final Timestamp currentTimestamp) {
        new InsertingJdbcWrapper<LedgerBalance>() {
            @Override
            protected void populateStatement(PreparedStatement preparedStatement) throws SQLException {
                preparedStatement.setInt(1, ledgerBalance.getUniversityFiscalYear());
                preparedStatement.setString(2, ledgerBalance.getChartOfAccountsCode());
                preparedStatement.setString(3, ledgerBalance.getAccountNumber());
                preparedStatement.setString(4, ledgerBalance.getSubAccountNumber());
                preparedStatement.setString(5, ledgerBalance.getFinancialObjectCode());
                preparedStatement.setString(6, ledgerBalance.getFinancialSubObjectCode());
                preparedStatement.setString(7, ledgerBalance.getFinancialBalanceTypeCode());
                preparedStatement.setString(8, ledgerBalance.getFinancialObjectTypeCode());
                preparedStatement.setString(9, ledgerBalance.getPositionNumber());
                preparedStatement.setString(10, ledgerBalance.getEmplid());
                // OBJ_ID must be unique; fall back to a random UUID when unset.
                if (ledgerBalance.getObjectId() == null) {
                    preparedStatement.setString(11, java.util.UUID.randomUUID().toString());
                }
                else {
                    preparedStatement.setString(11, ledgerBalance.getObjectId());
                }
                // New rows start at optimistic-locking version 1.
                if (ledgerBalance.getVersionNumber() == null) {
                    preparedStatement.setLong(12, 1);
                }
                else {
                    preparedStatement.setLong(12, ledgerBalance.getVersionNumber());
                }
                preparedStatement.setBigDecimal(13, ledgerBalance.getAccountLineAnnualBalanceAmount().bigDecimalValue());
                preparedStatement.setBigDecimal(14, ledgerBalance.getBeginningBalanceLineAmount().bigDecimalValue());
                preparedStatement.setBigDecimal(15, ledgerBalance.getContractsGrantsBeginningBalanceAmount().bigDecimalValue());
                preparedStatement.setBigDecimal(16, ledgerBalance.getMonth1Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(17, ledgerBalance.getMonth2Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(18, ledgerBalance.getMonth3Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(19, ledgerBalance.getMonth4Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(20, ledgerBalance.getMonth5Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(21, ledgerBalance.getMonth6Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(22, ledgerBalance.getMonth7Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(23, ledgerBalance.getMonth8Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(24, ledgerBalance.getMonth9Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(25, ledgerBalance.getMonth10Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(26, ledgerBalance.getMonth11Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(27, ledgerBalance.getMonth12Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(28, ledgerBalance.getMonth13Amount().bigDecimalValue());
                preparedStatement.setTimestamp(29, currentTimestamp);
            }
        }.execute(LedgerBalance.class);
    }

    /**
     * Updates the amount columns (and TIMESTAMP) of an existing ledger
     * balance row identified by its 10-part key; indices 1-17 are SET values,
     * 18-27 the WHERE key.
     */
    @Override
    public void updateLedgerBalance(final LedgerBalance ledgerBalance, final Timestamp currentTimestamp) {
        new UpdatingJdbcWrapper<LedgerBalance>() {
            @Override
            protected void populateStatement(PreparedStatement preparedStatement) throws SQLException {
                preparedStatement.setBigDecimal(1, ledgerBalance.getAccountLineAnnualBalanceAmount().bigDecimalValue());
                preparedStatement.setBigDecimal(2, ledgerBalance.getBeginningBalanceLineAmount().bigDecimalValue());
                preparedStatement.setBigDecimal(3, ledgerBalance.getContractsGrantsBeginningBalanceAmount().bigDecimalValue());
                preparedStatement.setBigDecimal(4, ledgerBalance.getMonth1Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(5, ledgerBalance.getMonth2Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(6, ledgerBalance.getMonth3Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(7, ledgerBalance.getMonth4Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(8, ledgerBalance.getMonth5Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(9, ledgerBalance.getMonth6Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(10, ledgerBalance.getMonth7Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(11, ledgerBalance.getMonth8Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(12, ledgerBalance.getMonth9Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(13, ledgerBalance.getMonth10Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(14, ledgerBalance.getMonth11Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(15, ledgerBalance.getMonth12Amount().bigDecimalValue());
                preparedStatement.setBigDecimal(16, ledgerBalance.getMonth13Amount().bigDecimalValue());
                preparedStatement.setTimestamp(17, currentTimestamp);
                preparedStatement.setInt(18, ledgerBalance.getUniversityFiscalYear());
                preparedStatement.setString(19, ledgerBalance.getChartOfAccountsCode());
                preparedStatement.setString(20, ledgerBalance.getAccountNumber());
                preparedStatement.setString(21, ledgerBalance.getSubAccountNumber());
                preparedStatement.setString(22, ledgerBalance.getFinancialObjectCode());
                preparedStatement.setString(23, ledgerBalance.getFinancialSubObjectCode());
                preparedStatement.setString(24, ledgerBalance.getFinancialBalanceTypeCode());
                preparedStatement.setString(25, ledgerBalance.getFinancialObjectTypeCode());
                preparedStatement.setString(26, ledgerBalance.getPositionNumber());
                preparedStatement.setString(27, ledgerBalance.getEmplid());
            }
        }.execute(LedgerBalance.class);
    }

    /**
     * Inserts a ledger entry row (48 columns). Generates OBJ_ID/VER_NBR
     * defaults when absent and binds explicit SQL NULLs for the nullable
     * integer columns PYRL_DT_FSCL_YR and EMPL_RCD.
     */
    @Override
    public void insertLedgerEntry(final LedgerEntry ledgerEntry) {
        new InsertingJdbcWrapper<LedgerEntry>() {
            @Override
            protected void populateStatement(PreparedStatement preparedStatement) throws SQLException {
                preparedStatement.setInt(1, ledgerEntry.getUniversityFiscalYear());
                preparedStatement.setString(2, ledgerEntry.getChartOfAccountsCode());
                preparedStatement.setString(3, ledgerEntry.getAccountNumber());
                preparedStatement.setString(4, ledgerEntry.getSubAccountNumber());
                preparedStatement.setString(5, ledgerEntry.getFinancialObjectCode());
                preparedStatement.setString(6, ledgerEntry.getFinancialSubObjectCode());
                preparedStatement.setString(7, ledgerEntry.getFinancialBalanceTypeCode());
                preparedStatement.setString(8, ledgerEntry.getFinancialObjectTypeCode());
                preparedStatement.setString(9, ledgerEntry.getUniversityFiscalPeriodCode());
                preparedStatement.setString(10, ledgerEntry.getFinancialDocumentTypeCode());
                preparedStatement.setString(11, ledgerEntry.getFinancialSystemOriginationCode());
                preparedStatement.setString(12, ledgerEntry.getDocumentNumber());
                preparedStatement.setInt(13, ledgerEntry.getTransactionLedgerEntrySequenceNumber());
                // OBJ_ID must be unique; fall back to a random UUID when unset.
                if (ledgerEntry.getObjectId() == null) {
                    preparedStatement.setString(14, java.util.UUID.randomUUID().toString());
                }
                else {
                    preparedStatement.setString(14, ledgerEntry.getObjectId());
                }
                // New rows start at optimistic-locking version 1.
                if (ledgerEntry.getVersionNumber() == null) {
                    preparedStatement.setLong(15, 1);
                }
                else {
                    preparedStatement.setLong(15, ledgerEntry.getVersionNumber());
                }
                preparedStatement.setString(16, ledgerEntry.getPositionNumber());
                preparedStatement.setString(17, ledgerEntry.getProjectCode());
                preparedStatement.setString(18, ledgerEntry.getTransactionLedgerEntryDescription());
                preparedStatement.setBigDecimal(19, ledgerEntry.getTransactionLedgerEntryAmount().bigDecimalValue());
                preparedStatement.setString(20, ledgerEntry.getTransactionDebitCreditCode());
                preparedStatement.setDate(21, ledgerEntry.getTransactionDate());
                preparedStatement.setString(22, ledgerEntry.getOrganizationDocumentNumber());
                preparedStatement.setString(23, ledgerEntry.getOrganizationReferenceId());
                preparedStatement.setString(24, ledgerEntry.getReferenceFinancialDocumentTypeCode());
                preparedStatement.setString(25, ledgerEntry.getReferenceFinancialSystemOriginationCode());
                preparedStatement.setString(26, ledgerEntry.getReferenceFinancialDocumentNumber());
                preparedStatement.setDate(27, ledgerEntry.getFinancialDocumentReversalDate());
                preparedStatement.setString(28, ledgerEntry.getTransactionEncumbranceUpdateCode());
                preparedStatement.setDate(29, ledgerEntry.getTransactionPostingDate());
                preparedStatement.setDate(30, ledgerEntry.getPayPeriodEndDate());
                preparedStatement.setBigDecimal(31, ledgerEntry.getTransactionTotalHours());
                // setInt cannot take null, so bind SQL NULL explicitly.
                if (ledgerEntry.getPayrollEndDateFiscalYear() == null) {
                    preparedStatement.setNull(32, java.sql.Types.INTEGER);
                }
                else {
                    preparedStatement.setInt(32, ledgerEntry.getPayrollEndDateFiscalYear());
                }
                preparedStatement.setString(33, ledgerEntry.getPayrollEndDateFiscalPeriodCode());
                preparedStatement.setString(34, ledgerEntry.getEmplid());
                // setInt cannot take null, so bind SQL NULL explicitly.
                if (ledgerEntry.getEmployeeRecord() == null) {
                    preparedStatement.setNull(35, java.sql.Types.INTEGER);
                }
                else {
                    preparedStatement.setInt(35, ledgerEntry.getEmployeeRecord());
                }
                preparedStatement.setString(36, ledgerEntry.getEarnCode());
                preparedStatement.setString(37, ledgerEntry.getPayGroup());
                preparedStatement.setString(38, ledgerEntry.getSalaryAdministrationPlan());
                preparedStatement.setString(39, ledgerEntry.getGrade());
                preparedStatement.setString(40, ledgerEntry.getRunIdentifier());
                preparedStatement.setString(41, ledgerEntry.getLaborLedgerOriginalChartOfAccountsCode());
                preparedStatement.setString(42, ledgerEntry.getLaborLedgerOriginalAccountNumber());
                preparedStatement.setString(43, ledgerEntry.getLaborLedgerOriginalSubAccountNumber());
                preparedStatement.setString(44, ledgerEntry.getLaborLedgerOriginalFinancialObjectCode());
                preparedStatement.setString(45, ledgerEntry.getLaborLedgerOriginalFinancialSubObjectCode());
                preparedStatement.setString(46, ledgerEntry.getHrmsCompany());
                preparedStatement.setString(47, ledgerEntry.getSetid());
                preparedStatement.setTimestamp(48, ledgerEntry.getTransactionDateTimeStamp());
            }
        }.execute(LedgerEntry.class);
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.kudu;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.kudu.KuduOutputFormat.KuduRecordWriter;
import org.apache.hadoop.hive.metastore.HiveMetaHook;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.kudu.Schema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides a HiveStorageHandler implementation for Apache Kudu.
*/
public class KuduStorageHandler extends DefaultStorageHandler implements HiveStoragePredicateHandler {
private static final Logger LOG = LoggerFactory.getLogger(KuduStorageHandler.class);
private static final String KUDU_PROPERTY_PREFIX = "kudu.";
/** Table Properties. Used in the hive table definition when creating a new table. */
public static final String KUDU_TABLE_ID_KEY = KUDU_PROPERTY_PREFIX + "table_id";
public static final String KUDU_TABLE_NAME_KEY = KUDU_PROPERTY_PREFIX + "table_name";
public static final String KUDU_MASTER_ADDRS_KEY = KUDU_PROPERTY_PREFIX + "master_addresses";
public static final List<String> KUDU_TABLE_PROPERTIES =
Arrays.asList(KUDU_TABLE_ID_KEY, KUDU_TABLE_NAME_KEY, KUDU_MASTER_ADDRS_KEY);
private Configuration conf;
@Override
public Class<? extends InputFormat> getInputFormatClass() {
return KuduInputFormat.class;
}
@Override
public Class<? extends OutputFormat> getOutputFormatClass() {
return KuduOutputFormat.class;
}
@Override
public Class<? extends AbstractSerDe> getSerDeClass() {
return KuduSerDe.class;
}
@Override
public HiveMetaHook getMetaHook() {
return null;
}
@Override
public HiveAuthorizationProvider getAuthorizationProvider() throws HiveException {
return new DefaultHiveAuthorizationProvider();
}
@Override
public Configuration getConf() {
return conf;
}
@Override
public void setConf(Configuration conf) {
this.conf = conf;
}
@Override
public void configureInputJobProperties(TableDesc tableDesc,
Map<String, String> jobProperties) {
configureJobProperties(tableDesc, jobProperties);
}
@Override
public void configureOutputJobProperties(TableDesc tableDesc,
Map<String, String> jobProperties) {
configureJobProperties(tableDesc, jobProperties);
}
@Override
public void configureTableJobProperties(TableDesc tableDesc,
Map<String, String> jobProperties) {
configureJobProperties(tableDesc, jobProperties);
}
@Override
public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
// Copied from the DruidStorageHandler.
if (UserGroupInformation.isSecurityEnabled()) {
// AM can not do Kerberos Auth so will do the input split generation in the HS2
LOG.debug("Setting {} to {} to enable split generation on HS2",
HiveConf.ConfVars.HIVE_AM_SPLIT_GENERATION.toString(),
Boolean.FALSE.toString());
jobConf.set(HiveConf.ConfVars.HIVE_AM_SPLIT_GENERATION.toString(), Boolean.FALSE.toString());
}
try {
addDependencyJars(jobConf, KuduStorageHandler.class);
} catch (IOException e) {
Throwables.propagate(e);
}
}
// Copied from the DruidStorageHandler.
private static void addDependencyJars(Configuration conf, Class<?>... classes)
throws IOException {
FileSystem localFs = FileSystem.getLocal(conf);
Set<String> jars = new HashSet<>(conf.getStringCollection("tmpjars"));
for (Class<?> clazz : classes) {
if (clazz == null) {
continue;
}
final String path = Utilities.jarFinderGetJar(clazz);
if (path == null) {
throw new RuntimeException("Could not find jar for class " + clazz +
" in order to ship it to the cluster.");
}
if (!localFs.exists(new Path(path))) {
throw new RuntimeException("Could not validate jar file " + path + " for class " + clazz);
}
jars.add(path);
}
if (jars.isEmpty()) {
return;
}
//noinspection ToArrayCallWithZeroLengthArrayArgument
conf.set("tmpjars", StringUtils.arrayToString(jars.toArray(new String[jars.size()])));
}
private void configureJobProperties(TableDesc tableDesc,
Map<String, String> jobProperties) {
Properties tblProps = tableDesc.getProperties();
copyPropertiesFromTable(jobProperties, tblProps);
}
private void copyPropertiesFromTable(Map<String, String> jobProperties, Properties tblProps) {
for (String propToCopy : KUDU_TABLE_PROPERTIES) {
if (tblProps.containsKey(propToCopy)) {
String value = tblProps.getProperty(propToCopy);
conf.set(propToCopy, value);
jobProperties.put(propToCopy, value);
}
}
}
/**
* Gives the storage handler a chance to decompose a predicate.
* The storage handler should analyze the predicate and return the portion of it which
* cannot be evaluated during table access.
*
* @param jobConf contains a job configuration matching the one that will later be passed
* to getRecordReader and getSplits
* @param deserializer deserializer which will be used when fetching rows
* @param predicate predicate to be decomposed
* @return decomposed form of predicate, or null if no pushdown is possible at all
*/
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
ExprNodeDesc predicate) {
Preconditions.checkArgument(deserializer instanceof KuduSerDe);
KuduSerDe serDe = (KuduSerDe) deserializer;
Schema schema = serDe.getSchema();
return KuduPredicateHandler.decompose(predicate, schema);
}
/**
* Used to fetch runtime information about storage handler during DESCRIBE EXTENDED statement.
*/
@Override
public StorageHandlerInfo getStorageHandlerInfo(Table table) throws MetaException {
return null;
}
}
|
|
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.util;
import org.apache.commons.math3.complex.Complex;
import org.apache.commons.math3.util.FastMath;
import org.nd4j.linalg.api.complex.IComplexDouble;
import org.nd4j.linalg.api.complex.IComplexFloat;
import org.nd4j.linalg.api.complex.IComplexNumber;
import org.nd4j.linalg.factory.Nd4j;
/**
 * Utility methods for working with {@code IComplexNumber} instances. The actual
 * arithmetic is delegated to commons-math {@link Complex}; results are converted
 * back to ND4J double-precision complex numbers.
 *
 * @author Adam Gibson
 */
public class ComplexUtil {
    private ComplexUtil() {
    }

    /** Converts an ND4J complex number to a commons-math {@link Complex}. */
    private static Complex toComplex(IComplexNumber num) {
        return new Complex(num.realComponent().doubleValue(), num.imaginaryComponent().doubleValue());
    }

    /** Converts a commons-math {@link Complex} back to an ND4J double-precision complex number. */
    private static IComplexNumber fromComplex(Complex c) {
        return Nd4j.createDouble(c.getReal(), c.getImaginary());
    }

    /**
     * Create complex numbers with the given real components and zero imaginary parts.
     * @param realComponents the real components for the complex numbers
     * @return the complex numbers based on the given real components
     */
    public static IComplexNumber[][] complexNumbersFor(float[][] realComponents) {
        IComplexNumber[][] ret = new IComplexNumber[realComponents.length][realComponents[0].length];
        for(int i = 0; i < realComponents.length; i++)
            for(int j = 0; j < realComponents[i].length; j++)
                ret[i][j] = Nd4j.createComplexNumber(realComponents[i][j],0);
        return ret;
    }

    /**
     * Create complex numbers with the given real components and zero imaginary parts.
     * @param realComponents the real components for the complex numbers
     * @return the complex numbers based on the given real components
     */
    public static IComplexNumber[][] complexNumbersFor(double[][] realComponents) {
        IComplexNumber[][] ret = new IComplexNumber[realComponents.length][realComponents[0].length];
        for(int i = 0; i < realComponents.length; i++)
            for(int j = 0; j < realComponents[i].length; j++)
                ret[i][j] = Nd4j.createComplexNumber(realComponents[i][j],0);
        return ret;
    }

    /**
     * Create complex numbers with the given real components and zero imaginary parts.
     * @param realComponents the real components for the complex numbers
     * @return the complex numbers based on the given real components
     */
    public static IComplexNumber[] complexNumbersFor(float[] realComponents) {
        IComplexNumber[] ret = new IComplexNumber[realComponents.length];
        for(int i = 0; i < realComponents.length; i++)
            ret[i] = Nd4j.createComplexNumber(realComponents[i],0);
        return ret;
    }

    /**
     * Create complex numbers with the given real components and zero imaginary parts.
     * @param realComponents the real components for the complex numbers
     * @return the complex numbers based on the given real components
     */
    public static IComplexNumber[] complexNumbersFor(double[] realComponents) {
        IComplexNumber[] ret = new IComplexNumber[realComponents.length];
        for(int i = 0; i < realComponents.length; i++)
            ret[i] = Nd4j.createComplexNumber(realComponents[i],0);
        return ret;
    }

    /**
     * Return the arc tangent of the given complex number.
     *
     * @param num the number to take the arc tangent of
     * @return the arc tangent of this complex number
     */
    public static IComplexNumber atan(IComplexNumber num) {
        return fromComplex(toComplex(num).atan());
    }

    /**
     * Return the arc cosine of the given complex number.
     *
     * @param num the number to take the arc cosine of
     * @return the arc cosine of this complex number
     */
    public static IComplexNumber acos(IComplexNumber num) {
        return fromComplex(toComplex(num).acos());
    }

    /**
     * Return the arc sine of the given complex number.
     *
     * @param num the number to take the arc sine of
     * @return the arc sine of this complex number
     */
    public static IComplexNumber asin(IComplexNumber num) {
        return fromComplex(toComplex(num).asin());
    }

    /**
     * Return the sine of the given complex number.
     *
     * @param num the number to take the sine of
     * @return the sine of this complex number
     */
    public static IComplexNumber sin(IComplexNumber num) {
        return fromComplex(toComplex(num).sin());
    }

    /**
     * Return the component-wise ceiling of the given complex number.
     *
     * @param num the number to take the ceiling of
     * @return the ceiling of this complex number (applied to the real and imaginary parts)
     */
    public static IComplexNumber ceil(IComplexNumber num) {
        Complex c = new Complex(FastMath.ceil(num.realComponent().doubleValue()),
                FastMath.ceil(num.imaginaryComponent().doubleValue()));
        return fromComplex(c);
    }

    /**
     * Return the component-wise floor of the given complex number.
     *
     * @param num the number to take the floor of
     * @return the floor of this complex number (applied to the real and imaginary parts)
     */
    public static IComplexNumber floor(IComplexNumber num) {
        Complex c = new Complex(FastMath.floor(num.realComponent().doubleValue()),
                FastMath.floor(num.imaginaryComponent().doubleValue()));
        return fromComplex(c);
    }

    /**
     * Return the negation of the given complex number.
     *
     * @param num the number to negate
     * @return the negation of this complex number
     */
    public static IComplexNumber neg(IComplexNumber num) {
        return fromComplex(toComplex(num).negate());
    }

    /**
     * Return the natural logarithm of the given complex number.
     *
     * @param num the number to take the logarithm of
     * @return the natural logarithm of this complex number
     */
    public static IComplexNumber log(IComplexNumber num) {
        return fromComplex(toComplex(num).log());
    }

    /**
     * Return the square root of the given complex number.
     *
     * @param num the number to take the square root of
     * @return the square root of this complex number
     */
    public static IComplexNumber sqrt(IComplexNumber num) {
        return fromComplex(toComplex(num).sqrt());
    }

    /**
     * Return the absolute value (modulus) of the given complex number,
     * as a complex number with zero imaginary part.
     *
     * @param num the number to take the absolute value of
     * @return the absolute value of this complex number
     */
    public static IComplexNumber abs(IComplexNumber num) {
        double c = toComplex(num).abs();
        return Nd4j.createDouble(c, 0);
    }

    /**
     * Round the real and imaginary components to the nearest whole numbers.
     *
     * @param num the number to round
     * @return the component-wise rounded complex number
     */
    public static IComplexNumber round(IComplexNumber num) {
        return Nd4j.createDouble(Math.round(num.realComponent().doubleValue()),
                Math.round(num.imaginaryComponent().doubleValue()));
    }

    /**
     * Raise a complex number to a complex power.
     *
     * @param num the number to raise
     * @param power the power to raise to
     * @return the number raised to the power; NaN results are replaced
     *         with {@code Nd4j.EPS_THRESHOLD + 0i}
     */
    public static IComplexNumber pow(IComplexNumber num, IComplexNumber power) {
        Complex c = toComplex(num).pow(toComplex(power));
        if (c.isNaN())
            c = new Complex(Nd4j.EPS_THRESHOLD, 0.0);
        return fromComplex(c);
    }

    /**
     * Raise a complex number to a real power.
     *
     * @param num the number to raise
     * @param power the power to raise to
     * @return the number raised to the power; NaN results are replaced
     *         with {@code Nd4j.EPS_THRESHOLD + 0i}
     */
    public static IComplexNumber pow(IComplexNumber num, double power) {
        Complex c = toComplex(num).pow(power);
        if (c.isNaN())
            c = new Complex(Nd4j.EPS_THRESHOLD, 0.0);
        return fromComplex(c);
    }

    /**
     * Return the cosine of a complex number.
     *
     * @param num the number to take the cosine of
     * @return the cosine of the complex number
     */
    public static IComplexNumber cos(IComplexNumber num) {
        return fromComplex(toComplex(num).cos());
    }

    /**
     * Return the hard tanh of a complex number: the tanh with the real
     * component clamped from below at -1.0.
     *
     * @param num the number to take the hard tanh of
     * @return the hard tanh of the complex number
     */
    public static IComplexNumber hardTanh(IComplexNumber num) {
        Complex c = toComplex(num).tanh();
        // NOTE(review): only the lower bound of the real component is clamped;
        // there is no symmetric cap at +1.0. Preserved as-is — confirm whether
        // the asymmetry is intentional before changing.
        if (c.getReal() < -1.0)
            c = new Complex(-1.0, c.getImaginary());
        return fromComplex(c);
    }

    /**
     * Return the tanh of a complex number.
     *
     * @param num the number to take the tanh of
     * @return the tanh of the complex number
     */
    public static IComplexNumber tanh(IComplexNumber num) {
        return fromComplex(toComplex(num).tanh());
    }

    /**
     * Returns the exp of a complex number:
     * Let r be the realComponent component and i be the imaginary
     * Let ret be the complex number returned
     * ret -> exp(r) * cos(i), exp(r) * sin(i)
     * where the first number is the realComponent component
     * and the second number is the imaginary component
     *
     * <p>Dispatches to the float or double overload based on the runtime type.
     *
     * @param d the number to take the exp of
     * @return the exponential of this complex number
     */
    public static IComplexNumber exp(IComplexNumber d) {
        if (d instanceof IComplexFloat)
            return exp((IComplexFloat) d);
        return exp((IComplexDouble) d);
    }

    /**
     * Returns the exp of a complex number:
     * Let r be the realComponent component and i be the imaginary
     * Let ret be the complex number returned
     * ret -> exp(r) * cos(i), exp(r) * sin(i)
     * where the first number is the realComponent component
     * and the second number is the imaginary component
     *
     * @param d the number to take the exp of
     * @return the exponential of this complex number
     */
    public static IComplexDouble exp(IComplexDouble d) {
        return Nd4j.createDouble(FastMath.exp(d.realComponent()) * FastMath.cos(d.imaginaryComponent()),
                FastMath.exp(d.realComponent()) * FastMath.sin(d.imaginaryComponent()));
    }

    /**
     * Returns the exp of a complex number:
     * Let r be the realComponent component and i be the imaginary
     * Let ret be the complex number returned
     * ret -> exp(r) * cos(i), exp(r) * sin(i)
     * where the first number is the realComponent component
     * and the second number is the imaginary component
     *
     * @param d the number to take the exp of
     * @return the exponential of this complex number
     */
    public static IComplexFloat exp(IComplexFloat d) {
        return Nd4j.createFloat((float) FastMath.exp(d.realComponent()) * (float) FastMath.cos(d.imaginaryComponent()),
                (float) FastMath.exp(d.realComponent()) * (float) FastMath.sin(d.imaginaryComponent()));
    }
}
|
|
/*
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Saurabh Sejpal
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*
*/
package com.sejpalsaurabh.postmark.client.server;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.sejpalsaurabh.postmark.exception.PostmarkException;
import com.sejpalsaurabh.postmark.model.Account;
import com.sejpalsaurabh.postmark.model.DateTimeTypeAdapter;
import com.sejpalsaurabh.postmark.model.Server;
import com.sejpalsaurabh.postmark.model.SkipMeExclusionStrategy;
import com.sejpalsaurabh.postmark.response.PostmarkResponse;
import com.sejpalsaurabh.postmark.util.ProjectConstants;
import com.sejpalsaurabh.postmark.util.ProjectUtil;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import java.io.IOException;
import java.util.List;
/**
*
* <p>This class will allow you the access <tt>Server API</tt>
*
* <p>
* You can check <a href="http://developer.postmarkapp.com/developer-api-servers.html" target="_blank">Server API</a>
* for more detail.
*
*
* @author Saurabh Sejpal
* @version 1.0
* @since 1.0
*/
public class PostmarkServerClient {
private static Logger logger = Logger.getLogger(PostmarkServerClient.class);
private static String accountToken;
private static GsonBuilder gsonBuilder = new GsonBuilder();
static {
gsonBuilder.registerTypeAdapter(DateTime.class, new DateTimeTypeAdapter());
gsonBuilder.setPrettyPrinting();
gsonBuilder.setExclusionStrategies(new SkipMeExclusionStrategy(Boolean.class));
gsonBuilder.disableHtmlEscaping();
if (ProjectUtil.isAccountTokenThere()) {
accountToken = new ProjectUtil().getPostMarkProperties().getProperty("account-key").trim();
} else {
logger.error("PostmarkServerClient -> static -> account-key not found");
}
}
public PostmarkServerClient() {
//Default Constructor
}
public List<Server> getServers() throws PostmarkException {
if (ProjectConstants.isAccountToken) {
HttpClient httpClient = HttpClientBuilder.create().build();
Account account = new Account();
try {
// Create get request to Postmark Account API endpoint
HttpGet method = new HttpGet(ProjectConstants.POSTMARK_ENDPOINT + ProjectConstants.SEPARATOR + ProjectConstants.SERVERS + "?count=" + ProjectConstants.MAXIMUM_RESULT_COUNT + "&offset=" + ProjectConstants.OFFSET);
// Add standard headers required by Postmark
method.addHeader("Accept", "application/json");
method.addHeader(ProjectConstants.POSTMARK_ACCOUNT_TOKEN, accountToken);
ResponseHandler<String> responseHandler = new BasicResponseHandler();
try {
String response = httpClient.execute(method, responseHandler);
logger.info("PostmarkServerClient -> getServers response: " + response);
account = gsonBuilder.create().fromJson(response, Account.class);
}
catch (IOException ioException) {
logger.error(ioException.getMessage());
throw new PostmarkException(ioException);
}
} catch (Exception exception) {
logger.error(exception.getMessage());
throw new PostmarkException(exception);
}
finally {
httpClient.getConnectionManager().shutdown();
}
return account.getServerList();
} else {
logger.error("account-key not found");
return null;
}
}
public Server getServer(long serverId) throws PostmarkException {
Server server = new Server();
if (ProjectConstants.isAccountToken) {
HttpClient httpClient = HttpClientBuilder.create().build();
try {
// Create get request to Postmark Account API endpoint
HttpGet method = new HttpGet(ProjectConstants.POSTMARK_ENDPOINT + ProjectConstants.SEPARATOR + ProjectConstants.SERVERS + ProjectConstants.SEPARATOR + serverId);
// Add standard headers required by Postmark
method.addHeader("Accept", "application/json");
method.addHeader(ProjectConstants.POSTMARK_ACCOUNT_TOKEN, accountToken);
ResponseHandler<String> responseHandler = new BasicResponseHandler();
try {
String response = httpClient.execute(method, responseHandler);
logger.info("PostmarkServerClient -> getServer response: " + response);
server = gsonBuilder.create().fromJson(response, Server.class);
}
catch (IOException ioException) {
logger.error(ioException.getMessage());
throw new PostmarkException(ioException);
}
} catch (Exception exception) {
logger.error(exception.getMessage());
throw new PostmarkException(exception);
}
finally {
httpClient.getConnectionManager().shutdown();
}
} else {
logger.error("account-key not found");
}
return server;
}
public Server createServer(Server server) throws PostmarkException {
if (ProjectConstants.isAccountToken) {
HttpClient httpClient = HttpClientBuilder.create().build();
Server theResponse = new Server();
try {
// Create post request to Postmark API endpoint
HttpPost method = new HttpPost(ProjectConstants.POSTMARK_ENDPOINT + ProjectConstants.SEPARATOR + ProjectConstants.SERVERS);
// Add standard headers required by Postmark
method.addHeader("Accept", "application/json");
method.addHeader("Content-Type", "application/json; charset=utf-8");
method.addHeader(ProjectConstants.POSTMARK_ACCOUNT_TOKEN, accountToken);
// Convert the message into JSON content
Gson gson = gsonBuilder.create();
String messageContents = gson.toJson(server);
logger.info("Create Server contents: " + messageContents);
// Add JSON as payload to post request
StringEntity payload = new StringEntity(messageContents, "UTF-8");
method.setEntity(payload);
ResponseHandler<String> responseHandler = new BasicResponseHandler();
try {
String response = httpClient.execute(method, responseHandler);
logger.info("PostmarkServerClient -> createServer response: " + response);
theResponse = gsonBuilder.create().fromJson(response, Server.class);
//TODO : Add Proper Error Handling for with API Error Codes
//TODO : Check : http://developer.postmarkapp.com/developer-api-overview.html#error-codes
} catch (HttpResponseException httpResponseException) {
logger.error(httpResponseException.getMessage());
throw new PostmarkException(httpResponseException);
}
} catch (Exception exception) {
logger.error("There has been an error while creating server : " + exception.getMessage());
throw new PostmarkException(exception);
}
finally {
httpClient.getConnectionManager().shutdown();
}
return theResponse;
} else {
logger.error("account-key not found");
return null;
}
}
public Server editServer(Server server) throws PostmarkException {
//TODO : http://developer.postmarkapp.com/developer-api-servers.html#edit-server
if (ProjectConstants.isAccountToken) {
HttpClient httpClient = HttpClientBuilder.create().build();
Server theResponse = new Server();
try {
// Create post request to Postmark API endpoint
HttpPut method = new HttpPut(ProjectConstants.POSTMARK_ENDPOINT + ProjectConstants.SEPARATOR + ProjectConstants.SERVERS + ProjectConstants.SEPARATOR + server.getServerId());
// Add standard headers required by Postmark
method.addHeader("Accept", "application/json");
method.addHeader("Content-Type", "application/json; charset=utf-8");
method.addHeader(ProjectConstants.POSTMARK_ACCOUNT_TOKEN, accountToken);
server.setServerId(0);
// Convert the message into JSON content
Gson gson = gsonBuilder.create();
String messageContents = gson.toJson(server);
logger.info("Edit Server Contents : " + messageContents);
// Add JSON as payload to post request
StringEntity payload = new StringEntity(messageContents, "UTF-8");
method.setEntity(payload);
ResponseHandler<String> responseHandler = new BasicResponseHandler();
try {
String response = httpClient.execute(method, responseHandler);
logger.info("PostmarkServerClient -> Edit Server response: " + response);
theResponse = gsonBuilder.create().fromJson(response, Server.class);
//TODO : Add Proper Error Handling for with API Error Codes
//TODO : Check : http://developer.postmarkapp.com/developer-api-overview.html#error-codes
} catch (HttpResponseException httpResponseException) {
logger.error(httpResponseException.getMessage());
throw new PostmarkException(httpResponseException);
}
} catch (Exception exception) {
logger.error("There has been an error while editing server : " + exception.getMessage());
throw new PostmarkException(exception);
}
finally {
httpClient.getConnectionManager().shutdown();
}
return theResponse;
} else {
logger.error("account-key not found");
return null;
}
}
public PostmarkResponse deleteServer(long serverId) throws PostmarkException {
if (ProjectConstants.isAccountToken) {
HttpClient httpClient = HttpClientBuilder.create().build();
PostmarkResponse postmarkResponse = new PostmarkResponse();
try{
HttpDelete method = new HttpDelete(ProjectConstants.POSTMARK_ENDPOINT + ProjectConstants.SEPARATOR + ProjectConstants.SERVERS + ProjectConstants.SEPARATOR + serverId);
// Add standard headers required by Postmark
method.addHeader("Accept", "application/json");
method.addHeader("Content-Type", "application/json; charset=utf-8");
method.addHeader(ProjectConstants.POSTMARK_ACCOUNT_TOKEN, accountToken);
ResponseHandler<String> responseHandler = new BasicResponseHandler();
try {
String response = httpClient.execute(method, responseHandler);
logger.info("PostmarkServerClient -> deleteServer response: " + response);
postmarkResponse = gsonBuilder.create().fromJson(response, PostmarkResponse.class);
} catch (HttpResponseException httpResponseException) {
logger.error(httpResponseException.getMessage());
throw new PostmarkException(httpResponseException);
}
} catch (Exception exception) {
logger.error("There has been an error while editing server : " + exception.getMessage());
throw new PostmarkException(exception);
}
finally {
httpClient.getConnectionManager().shutdown();
}
return postmarkResponse;
} else {
logger.error("account-key not found");
return null;
}
}
}
|
|
/*
* Copyright 2012-2014, Continuuity, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.continuuity.loom.store.provisioner;
import com.continuuity.loom.account.Account;
import com.continuuity.loom.common.conf.Constants;
import com.continuuity.loom.provisioner.plugin.PluginType;
import com.continuuity.loom.provisioner.plugin.ResourceCollection;
import com.continuuity.loom.provisioner.plugin.ResourceMeta;
import com.continuuity.loom.provisioner.plugin.ResourceStatus;
import com.continuuity.loom.provisioner.plugin.ResourceType;
import com.continuuity.loom.spec.plugin.ResourceTypeFormat;
import com.continuuity.loom.spec.plugin.ResourceTypeSpecification;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import java.util.Set;
/**
*
*/
public abstract class PluginResourceMetaStoreTest {
// Two distinct resource types to verify that resources are partitioned per plugin type/name.
ResourceType type1 = new ResourceType(PluginType.AUTOMATOR, "chef-solo", "cookbooks");
ResourceType type2 = new ResourceType(PluginType.PROVIDER, "openstack", "keys");
// Two admin accounts in different tenants to verify per-account isolation.
Account account1 = new Account(Constants.ADMIN_USER, "tenant1");
Account account2 = new Account(Constants.ADMIN_USER, "tenant2");
// Subclasses supply the concrete metadata store implementation under test.
abstract PluginMetaStoreService getPluginResourceMetaStoreService() throws Exception;
// Subclasses wipe all persisted state so tests start from a clean store.
abstract void clearData() throws Exception;
// Runs after every test to keep test cases independent of each other.
@After
public void cleanupTest() throws Exception {
clearData();
}
@Test
public void testGetNumResources() throws Exception {
  PluginMetaStoreService store = getPluginResourceMetaStoreService();
  PluginResourceTypeView cookbooks = store.getResourceTypeView(account1, type1);
  PluginResourceTypeView keys = store.getResourceTypeView(account1, type2);
  // for account1 write 6 resources (7 but one is deleted) in all different states
  cookbooks.add(new ResourceMeta("r1", 1, ResourceStatus.ACTIVE));
  cookbooks.add(new ResourceMeta("r1", 2, ResourceStatus.INACTIVE));
  cookbooks.add(new ResourceMeta("r2", 1, ResourceStatus.INACTIVE));
  cookbooks.add(new ResourceMeta("r2", 2, ResourceStatus.STAGED));
  keys.add(new ResourceMeta("r3", 1, ResourceStatus.RECALLED));
  keys.add(new ResourceMeta("r3", 2, ResourceStatus.STAGED));
  keys.add(new ResourceMeta("r3", 3, ResourceStatus.STAGED));
  keys.delete("r3", 3);
  Assert.assertEquals(6, store.getAccountView(account1).numResources());
  // account 2 should have nothing
  Assert.assertEquals(0, store.getAccountView(account2).numResources());
}
@Test
public void testWriteDeleteExistsGetWithinAccount() throws Exception {
  PluginMetaStoreService store = getPluginResourceMetaStoreService();
  PluginResourceTypeView view = store.getResourceTypeView(account1, type1);
  String resourceName = "name";
  int resourceVersion = 1;
  ResourceMeta expected = new ResourceMeta(resourceName, resourceVersion);
  // after adding, the resource is visible and retrievable
  view.add(expected);
  Assert.assertTrue(view.exists(resourceName, resourceVersion));
  Assert.assertEquals(expected, view.get(resourceName, resourceVersion));
  // after deleting, it is gone
  view.delete(resourceName, resourceVersion);
  Assert.assertFalse(view.exists(resourceName, resourceVersion));
  Assert.assertNull(view.get(resourceName, resourceVersion));
}
@Test
public void testAccountSeparation() throws Exception {
  PluginMetaStoreService store = getPluginResourceMetaStoreService();
  // Same resource type, two different accounts: writes in one must be invisible to the other.
  PluginResourceTypeView tenant1View = store.getResourceTypeView(account1, type1);
  PluginResourceTypeView tenant2View = store.getResourceTypeView(account2, type1);
  String resourceName = "name";
  int resourceVersion = 1;
  ResourceMeta meta = new ResourceMeta(resourceName, resourceVersion);
  // add to account1 only
  tenant1View.add(meta);
  Assert.assertTrue(tenant1View.exists(resourceName, resourceVersion));
  Assert.assertFalse(tenant2View.exists(resourceName, resourceVersion));
  Assert.assertEquals(meta, tenant1View.get(resourceName, resourceVersion));
  Assert.assertNull(tenant2View.get(resourceName, resourceVersion));
  // add to account2 as well
  tenant2View.add(meta);
  Assert.assertTrue(tenant1View.exists(resourceName, resourceVersion));
  Assert.assertTrue(tenant2View.exists(resourceName, resourceVersion));
  Assert.assertEquals(meta, tenant1View.get(resourceName, resourceVersion));
  Assert.assertEquals(meta, tenant2View.get(resourceName, resourceVersion));
  // delete from account1 only
  tenant1View.delete(resourceName, resourceVersion);
  Assert.assertFalse(tenant1View.exists(resourceName, resourceVersion));
  Assert.assertTrue(tenant2View.exists(resourceName, resourceVersion));
  Assert.assertNull(tenant1View.get(resourceName, resourceVersion));
  Assert.assertEquals(meta, tenant2View.get(resourceName, resourceVersion));
  // delete from account2 too
  tenant2View.delete(resourceName, resourceVersion);
  Assert.assertFalse(tenant1View.exists(resourceName, resourceVersion));
  Assert.assertFalse(tenant2View.exists(resourceName, resourceVersion));
  Assert.assertNull(tenant1View.get(resourceName, resourceVersion));
  Assert.assertNull(tenant2View.get(resourceName, resourceVersion));
}
@Test
public void testTypeSeparation() throws Exception {
  // The same name/version must be fully isolated between resource types of one account.
  PluginMetaStoreService metaStore = getPluginResourceMetaStoreService();
  PluginResourceTypeView typeOneView = metaStore.getResourceTypeView(account1, type1);
  PluginResourceTypeView typeTwoView = metaStore.getResourceTypeView(account1, type2);
  String resourceName = "name";
  int resourceVersion = 1;
  ResourceMeta resource = new ResourceMeta(resourceName, resourceVersion);

  // Adding under type 1 must not leak into type 2.
  typeOneView.add(resource);
  Assert.assertTrue(typeOneView.exists(resourceName, resourceVersion));
  Assert.assertEquals(resource, typeOneView.get(resourceName, resourceVersion));
  Assert.assertFalse(typeTwoView.exists(resourceName, resourceVersion));
  Assert.assertNull(typeTwoView.get(resourceName, resourceVersion));

  // After adding under type 2 as well, both types see their own copy.
  typeTwoView.add(resource);
  Assert.assertTrue(typeOneView.exists(resourceName, resourceVersion));
  Assert.assertEquals(resource, typeOneView.get(resourceName, resourceVersion));
  Assert.assertTrue(typeTwoView.exists(resourceName, resourceVersion));
  Assert.assertEquals(resource, typeTwoView.get(resourceName, resourceVersion));

  // Deleting under type 1 must leave type 2 untouched.
  typeOneView.delete(resourceName, resourceVersion);
  Assert.assertFalse(typeOneView.exists(resourceName, resourceVersion));
  Assert.assertNull(typeOneView.get(resourceName, resourceVersion));
  Assert.assertTrue(typeTwoView.exists(resourceName, resourceVersion));
  Assert.assertEquals(resource, typeTwoView.get(resourceName, resourceVersion));

  // Deleting under type 2 empties both.
  typeTwoView.delete(resourceName, resourceVersion);
  Assert.assertFalse(typeOneView.exists(resourceName, resourceVersion));
  Assert.assertNull(typeOneView.get(resourceName, resourceVersion));
  Assert.assertFalse(typeTwoView.exists(resourceName, resourceVersion));
  Assert.assertNull(typeTwoView.get(resourceName, resourceVersion));
}
@Test(expected = IllegalArgumentException.class)
public void testOnlyAdminsHaveAccess() throws Exception {
  // Requesting a resource type view with a non-admin account must be rejected.
  PluginMetaStoreService metaStore = getPluginResourceMetaStoreService();
  Account nonAdmin = new Account("notadmin", "tenant");
  ResourceType cookbooks = new ResourceType(PluginType.AUTOMATOR, "chef-solo", "cookbooks");
  metaStore.getResourceTypeView(nonAdmin, cookbooks);
}
@Test
public void testGetAll() throws Exception {
  PluginMetaStoreService metaStore = getPluginResourceMetaStoreService();
  PluginResourceTypeView typeView = metaStore.getResourceTypeView(account1, type1);

  // Several versions per name, spread across every status.
  ResourceMeta hadoop1 = new ResourceMeta("hadoop", 1, ResourceStatus.INACTIVE);
  ResourceMeta hadoop2 = new ResourceMeta("hadoop", 2, ResourceStatus.STAGED);
  ResourceMeta hadoop3 = new ResourceMeta("hadoop", 3, ResourceStatus.ACTIVE);
  ResourceMeta mysql1 = new ResourceMeta("mysql", 1, ResourceStatus.STAGED);
  ResourceMeta mysql2 = new ResourceMeta("mysql", 2, ResourceStatus.ACTIVE);
  ResourceMeta apache = new ResourceMeta("apache", 1, ResourceStatus.RECALLED);
  Set<ResourceMeta> hadoops = ImmutableSet.of(hadoop1, hadoop2, hadoop3);
  Set<ResourceMeta> mysqls = ImmutableSet.of(mysql1, mysql2);
  Set<ResourceMeta> apaches = ImmutableSet.of(apache);
  Set<ResourceMeta> everything = ImmutableSet.of(hadoop1, hadoop2, hadoop3, mysql1, mysql2, apache);
  for (ResourceMeta resource : everything) {
    typeView.add(resource);
  }

  // Unfiltered getAll() groups every stored resource by name.
  Assert.assertEquals(
      ImmutableMap.<String, Set<ResourceMeta>>of(
          "hadoop", hadoops,
          "mysql", mysqls,
          "apache", apaches),
      ImmutableMap.copyOf(typeView.getAll())
  );
  Assert.assertEquals(hadoops, ImmutableSet.copyOf(typeView.getAll("hadoop")));
  Assert.assertEquals(mysqls, ImmutableSet.copyOf(typeView.getAll("mysql")));
  Assert.assertEquals(apaches, ImmutableSet.copyOf(typeView.getAll("apache")));

  // Filtering by ACTIVE only surfaces the active version of each name.
  Assert.assertEquals(
      ImmutableMap.<String, Set<ResourceMeta>>of(
          "hadoop", ImmutableSet.<ResourceMeta>of(hadoop3),
          "mysql", ImmutableSet.<ResourceMeta>of(mysql2)),
      ImmutableMap.copyOf(typeView.getAll(ResourceStatus.ACTIVE))
  );
  Assert.assertEquals(Sets.newHashSet(hadoop3), typeView.getAll("hadoop", ResourceStatus.ACTIVE));
  Assert.assertEquals(Sets.newHashSet(mysql2), typeView.getAll("mysql", ResourceStatus.ACTIVE));
  Assert.assertTrue(typeView.getAll("apache", ResourceStatus.ACTIVE).isEmpty());

  // Filtering by STAGED.
  Assert.assertEquals(
      ImmutableMap.<String, Set<ResourceMeta>>of(
          "hadoop", ImmutableSet.<ResourceMeta>of(hadoop2),
          "mysql", ImmutableSet.<ResourceMeta>of(mysql1)),
      ImmutableMap.copyOf(typeView.getAll(ResourceStatus.STAGED))
  );
  Assert.assertEquals(Sets.newHashSet(hadoop2), typeView.getAll("hadoop", ResourceStatus.STAGED));
  Assert.assertEquals(Sets.newHashSet(mysql1), typeView.getAll("mysql", ResourceStatus.STAGED));
  Assert.assertTrue(typeView.getAll("apache", ResourceStatus.STAGED).isEmpty());

  // Filtering by RECALLED.
  Assert.assertEquals(
      ImmutableMap.<String, Set<ResourceMeta>>of(
          "apache", ImmutableSet.<ResourceMeta>of(apache)),
      ImmutableMap.copyOf(typeView.getAll(ResourceStatus.RECALLED))
  );
  Assert.assertTrue(typeView.getAll("hadoop", ResourceStatus.RECALLED).isEmpty());
  Assert.assertTrue(typeView.getAll("mysql", ResourceStatus.RECALLED).isEmpty());
  Assert.assertEquals(Sets.newHashSet(apache), typeView.getAll("apache", ResourceStatus.RECALLED));

  // Filtering by INACTIVE.
  Assert.assertEquals(
      ImmutableMap.<String, Set<ResourceMeta>>of(
          "hadoop", ImmutableSet.<ResourceMeta>of(hadoop1)),
      ImmutableMap.copyOf(typeView.getAll(ResourceStatus.INACTIVE))
  );
  Assert.assertEquals(Sets.newHashSet(hadoop1), typeView.getAll("hadoop", ResourceStatus.INACTIVE));
  Assert.assertTrue(typeView.getAll("mysql", ResourceStatus.INACTIVE).isEmpty());
  Assert.assertTrue(typeView.getAll("apache", ResourceStatus.INACTIVE).isEmpty());
}
/**
 * Verifies the staging state machine: staging an already STAGED or ACTIVE
 * version is a no-op, staging a RECALLED version re-activates it, and staging
 * an INACTIVE version deactivates the previously staged version of the same
 * resource. Call order matters throughout this test.
 */
@Test
public void testStage() throws Exception {
PluginMetaStoreService service = getPluginResourceMetaStoreService();
PluginResourceTypeView view = service.getResourceTypeView(account1, type1);
// three versions of "hadoop" in different states, plus unrelated resources
ResourceMeta hadoop1 = new ResourceMeta("hadoop", 1, ResourceStatus.INACTIVE);
ResourceMeta hadoop2 = new ResourceMeta("hadoop", 2, ResourceStatus.RECALLED);
ResourceMeta hadoop3 = new ResourceMeta("hadoop", 3, ResourceStatus.INACTIVE);
ResourceMeta mysql = new ResourceMeta("mysql", 1, ResourceStatus.STAGED);
ResourceMeta apache = new ResourceMeta("apache", 1, ResourceStatus.ACTIVE);
ResourceMeta php1 = new ResourceMeta("php", 1, ResourceStatus.ACTIVE);
ResourceMeta php2 = new ResourceMeta("php", 2, ResourceStatus.INACTIVE);
view.add(hadoop1);
view.add(hadoop2);
view.add(hadoop3);
view.add(mysql);
view.add(apache);
view.add(php1);
view.add(php2);
// check no-ops: staging a STAGED or ACTIVE version leaves the status unchanged
view.stage(mysql.getName(), mysql.getVersion());
Assert.assertEquals(ResourceStatus.STAGED, view.get(mysql.getName(), mysql.getVersion()).getStatus());
view.stage(apache.getName(), apache.getVersion());
Assert.assertEquals(ResourceStatus.ACTIVE, view.get(apache.getName(), apache.getVersion()).getStatus());
// check staging a recalled makes it active (the other hadoop versions stay untouched)
view.stage(hadoop2.getName(), hadoop2.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.ACTIVE, view.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
// check staging from inactive: version 1 becomes STAGED and the active version 2
// drops back to RECALLED
view.stage(hadoop1.getName(), hadoop1.getVersion());
Assert.assertEquals(ResourceStatus.STAGED, view.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.RECALLED, view.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
// check staging deactivates previous staged version (hadoop1 STAGED -> INACTIVE)
view.stage(hadoop3.getName(), hadoop3.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.RECALLED, view.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.STAGED, view.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
}
@Test
public void testStageOnNothingIsNoOp() throws Exception {
  // Staging a version that was never added must leave existing state alone.
  PluginMetaStoreService metaStore = getPluginResourceMetaStoreService();
  PluginResourceTypeView typeView = metaStore.getResourceTypeView(account1, type1);
  ResourceMeta stagedResource = new ResourceMeta("hadoop", 1, ResourceStatus.STAGED);
  typeView.add(stagedResource);
  // Version 2 does not exist; staging it must not disturb the staged version 1.
  int missingVersion = stagedResource.getVersion() + 1;
  typeView.stage(stagedResource.getName(), missingVersion);
  Assert.assertEquals(ResourceStatus.STAGED,
      typeView.get(stagedResource.getName(), stagedResource.getVersion()).getStatus());
}
/**
 * Verifies the recall state machine: recalling an INACTIVE or already
 * RECALLED version is a no-op, recalling a STAGED version makes it INACTIVE,
 * and recalling an ACTIVE version moves it to RECALLED. Call order matters.
 */
@Test
public void testRecall() throws Exception {
PluginMetaStoreService service = getPluginResourceMetaStoreService();
PluginResourceTypeView view = service.getResourceTypeView(account1, type1);
ResourceMeta hadoop1 = new ResourceMeta("hadoop", 1, ResourceStatus.INACTIVE);
ResourceMeta hadoop2 = new ResourceMeta("hadoop", 2, ResourceStatus.RECALLED);
ResourceMeta hadoop3 = new ResourceMeta("hadoop", 3, ResourceStatus.INACTIVE);
ResourceMeta mysql1 = new ResourceMeta("mysql", 1, ResourceStatus.STAGED);
ResourceMeta mysql2 = new ResourceMeta("mysql", 2, ResourceStatus.ACTIVE);
view.add(hadoop1);
view.add(hadoop2);
view.add(hadoop3);
view.add(mysql1);
view.add(mysql2);
// check no-ops: recalling INACTIVE or already RECALLED versions changes nothing
view.recall(hadoop1.getName(), hadoop1.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.RECALLED, view.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
view.recall(hadoop2.getName(), hadoop2.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.RECALLED, view.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
view.recall(hadoop3.getName(), hadoop3.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.RECALLED, view.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
// check recalling a staged resource deactivates it (other versions untouched)
view.recall(mysql1.getName(), mysql1.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(mysql1.getName(), mysql1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.ACTIVE, view.get(mysql2.getName(), mysql2.getVersion()).getStatus());
// check recalling an active moves it to recalled
view.recall(mysql2.getName(), mysql2.getVersion());
Assert.assertEquals(ResourceStatus.INACTIVE, view.get(mysql1.getName(), mysql1.getVersion()).getStatus());
Assert.assertEquals(ResourceStatus.RECALLED, view.get(mysql2.getName(), mysql2.getVersion()).getStatus());
}
/**
 * Verifies that syncing a ResourceCollection updates statuses across all
 * resource types of the account: STAGED resources included in the sync become
 * ACTIVE, RECALLED resources become INACTIVE, INACTIVE stays INACTIVE, and
 * ACTIVE resources included in the sync stay ACTIVE.
 */
@Test
public void testSyncStatus() throws Exception {
PluginMetaStoreService service = getPluginResourceMetaStoreService();
PluginResourceTypeView view1 = service.getResourceTypeView(account1, type1);
PluginResourceTypeView view2 = service.getResourceTypeView(account1, type2);
// type1 resources in a mix of states
ResourceMeta hadoop1 = new ResourceMeta("hadoop", 1, ResourceStatus.INACTIVE);
ResourceMeta hadoop2 = new ResourceMeta("hadoop", 2, ResourceStatus.STAGED);
ResourceMeta hadoop3 = new ResourceMeta("hadoop", 3, ResourceStatus.RECALLED);
ResourceMeta mysql1 = new ResourceMeta("mysql", 1, ResourceStatus.INACTIVE);
ResourceMeta mysql2 = new ResourceMeta("mysql", 2, ResourceStatus.STAGED);
ResourceMeta apache1 = new ResourceMeta("apache", 1, ResourceStatus.INACTIVE);
ResourceMeta apache2 = new ResourceMeta("apache", 2, ResourceStatus.RECALLED);
view1.add(hadoop1);
view1.add(hadoop2);
view1.add(hadoop3);
view1.add(mysql1);
view1.add(mysql2);
view1.add(apache1);
view1.add(apache2);
// type2 resources to confirm the sync spans resource types
ResourceMeta bob1 = new ResourceMeta("bob", 1, ResourceStatus.INACTIVE);
ResourceMeta bob2 = new ResourceMeta("bob", 2, ResourceStatus.STAGED);
ResourceMeta sally1 = new ResourceMeta("sally", 1, ResourceStatus.ACTIVE);
ResourceMeta sue1 = new ResourceMeta("sue", 1, ResourceStatus.RECALLED);
view2.add(bob1);
view2.add(bob2);
view2.add(sally1);
view2.add(sue1);
// sync only a subset: the staged/active resources that should survive as ACTIVE
ResourceCollection syncedResources = new ResourceCollection();
syncedResources.addResources(type1, new ResourceTypeSpecification(ResourceTypeFormat.ARCHIVE, null),
ImmutableSet.of(hadoop2, mysql2));
syncedResources.addResources(type2, new ResourceTypeSpecification(ResourceTypeFormat.FILE, "400"),
ImmutableSet.of(bob2, sally1));
service.getAccountView(account1).syncResources(syncedResources);
// inactive should stay inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view1.get(hadoop1.getName(), hadoop1.getVersion()).getStatus());
// staged should become active
Assert.assertEquals(ResourceStatus.ACTIVE, view1.get(hadoop2.getName(), hadoop2.getVersion()).getStatus());
// recalled should become inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view1.get(hadoop3.getName(), hadoop3.getVersion()).getStatus());
// inactive should stay inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view1.get(mysql1.getName(), mysql1.getVersion()).getStatus());
// staged should become active
Assert.assertEquals(ResourceStatus.ACTIVE, view1.get(mysql2.getName(), mysql2.getVersion()).getStatus());
// inactive should stay inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view1.get(apache1.getName(), apache1.getVersion()).getStatus());
// recalled should become inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view1.get(apache2.getName(), apache2.getVersion()).getStatus());
// check other type
// inactive should stay inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view2.get(bob1.getName(), bob1.getVersion()).getStatus());
// staged should become active
Assert.assertEquals(ResourceStatus.ACTIVE, view2.get(bob2.getName(), bob2.getVersion()).getStatus());
// active should stay active
Assert.assertEquals(ResourceStatus.ACTIVE, view2.get(sally1.getName(), sally1.getVersion()).getStatus());
// recalled should become inactive
Assert.assertEquals(ResourceStatus.INACTIVE, view2.get(sue1.getName(), sue1.getVersion()).getStatus());
}
}
|
|
package org.tinymediamanager.scraper.xbmc;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.lang3.StringUtils;
public class XbmcUtil {
  /**
   * Tries to detect the XBMC/Kodi installation folder by probing well-known
   * install locations for well-known application folder names.
   *
   * @return the first existing installation folder, or null if none was found
   */
  public static File detectXbmcFolder() {
    // NOTE(review): the original list contained "XMBC", a transposition typo for
    // "XBMC"; fixed so upper-case XBMC installs are found on case-sensitive file systems.
    String[] appFolder = { "Kodi", "kodi", "xbmc", "XBMC" };
    String[] installFolder = { System.getenv("ProgramFiles(x86)"), System.getenv("ProgramFiles"), System.getenv("ProgramData"), "/usr/share/",
        "/usr/lib/", "/Applications/XBMC.app/Contents/Resources" };
    return firstExistingCombination(installFolder, appFolder);
  }

  /**
   * Tries to detect the XBMC/Kodi userdata folder.
   * See http://wiki.xbmc.org/?title=Userdata
   *
   * @return the first existing userdata folder, or null if none was found
   */
  public static File detectXbmcUserdataFolder() {
    // "XMBC" fixed to "XBMC" (transposition typo); matters on Windows where the
    // classic userdata folder is %APPDATA%\XBMC.
    String[] appFolder = { "Kodi", "XBMC", "kodi", ".xbmc", "xbmc", ".kodi" };
    String[] userFolder = { System.getenv("APPDATA"), System.getProperty("user.home"),
        "/Users/" + System.getProperty("user.name") + "/Library/Application Support" };
    return firstExistingCombination(userFolder, appFolder);
  }

  /**
   * Probes every base/app folder combination (in order) and returns the first
   * one that exists on disk.
   *
   * @param baseFolders candidate parent directories; null/empty entries are skipped
   * @param appFolders candidate application folder names
   * @return the first existing combination, or null when nothing matched
   */
  private static File firstExistingCombination(String[] baseFolders, String[] appFolders) {
    for (String base : baseFolders) {
      if (StringUtils.isEmpty(base)) {
        continue;
      }
      for (String app : appFolders) {
        File candidate = new File(base, app);
        if (candidate.exists()) {
          return candidate;
        }
      }
    }
    return null;
  }

  /**
   * Collects all XBMC/Kodi addons matching the given filters from the bundled
   * "xbmc_scraper" folder, the userdata addons folder and the installation
   * addons folder, de-duplicating along the way.
   *
   * @param dirFilter filter selecting which addon directories to descend into
   * @param fileFilter filter selecting which files identify an addon
   * @return list of unique scrapers found; never null, possibly empty
   */
  public static ArrayList<XbmcScraper> getXbmcAddons(IOFileFilter dirFilter, IOFileFilter fileFilter) {
    ArrayList<XbmcScraper> scrapers = new ArrayList<XbmcScraper>();
    // scrapers bundled next to the application
    collectScrapers(new File("xbmc_scraper"), dirFilter, fileFilter, scrapers);
    // user-installed addons; if detection returns null, File(null, "addons") degrades
    // to the relative "addons" directory, matching the original behavior
    collectScrapers(new File(detectXbmcUserdataFolder(), "addons"), dirFilter, fileFilter, scrapers);
    // addons shipped with the XBMC/Kodi installation itself
    collectScrapers(new File(detectXbmcFolder(), "addons"), dirFilter, fileFilter, scrapers);
    return scrapers;
  }

  /**
   * Scans one addons folder and adds every matching scraper exactly once.
   * The scraper folder is the parent directory of each matched file.
   */
  private static void collectScrapers(File addons, IOFileFilter dirFilter, IOFileFilter fileFilter, ArrayList<XbmcScraper> scrapers) {
    System.out.println("searching for scrapers in: " + addons);
    if (!addons.exists()) {
      return;
    }
    Collection<File> files = FileUtils.listFiles(addons, fileFilter, dirFilter);
    for (File f : files) {
      XbmcScraper scraper = new XbmcScraper(f.getParentFile()); // parent = addon folder
      if (!scrapers.contains(scraper)) {
        scrapers.add(scraper);
      }
    }
  }

  /**
   * Returns a list of all found scraper addons, i.e. addon folders whose name
   * starts with "metadata" but does not contain "common", identified by their
   * addon.xml file.
   *
   * @return list of scraper addons, possibly empty
   */
  public static ArrayList<XbmcScraper> getAllScrapers() {
    IOFileFilter dirFilter = new IOFileFilter() {
      @Override
      public boolean accept(File arg0, String arg1) {
        return false;
      }

      @Override
      public boolean accept(File arg0) {
        // scraper addon folders, excluding shared "common" libraries
        return arg0.getName().startsWith("metadata") && !arg0.getName().contains("common");
      }
    };
    IOFileFilter fileFilter = new IOFileFilter() {
      @Override
      public boolean accept(File pathname) {
        return pathname.getName().equals("addon.xml");
      }

      @Override
      public boolean accept(File arg0, String arg1) {
        return false;
      }
    };
    ArrayList<XbmcScraper> scrapers = getXbmcAddons(dirFilter, fileFilter);
    if (scrapers.isEmpty()) {
      System.out.println("Meh - could not find any scrapers...");
    }
    return scrapers;
  }

  /**
   * Returns a list of all found common (shared library) addons, i.e. addon
   * folders whose name starts with "metadata" and contains "common",
   * identified by their addon.xml file.
   *
   * @return list of common addons, possibly empty
   */
  public static ArrayList<XbmcScraper> getAllCommon() {
    IOFileFilter dirFilter = new IOFileFilter() {
      @Override
      public boolean accept(File arg0, String arg1) {
        return false;
      }

      @Override
      public boolean accept(File arg0) {
        // all common metadata folders for additional inclusion
        return arg0.getName().startsWith("metadata") && arg0.getName().contains("common");
      }
    };
    IOFileFilter fileFilter = new IOFileFilter() {
      @Override
      public boolean accept(File pathname) {
        // the addon.xml identifies the common addon folder
        return pathname.getName().equals("addon.xml");
        // return pathname.getName().endsWith("xml") && !pathname.getName().equals("addon.xml");
      }

      @Override
      public boolean accept(File arg0, String arg1) {
        return false;
      }
    };
    ArrayList<XbmcScraper> common = getXbmcAddons(dirFilter, fileFilter);
    if (common.isEmpty()) {
      System.out.println("Meh - could not find any common folders...");
    }
    return common;
  }

  /**
   * Returns every XML file belonging to the common addons found by
   * {@link #getAllCommon()}, excluding the addon.xml descriptors themselves.
   *
   * @return list of unique common XML files, possibly empty
   */
  public static ArrayList<File> getAllCommonXMLs() {
    ArrayList<File> common = new ArrayList<File>();
    IOFileFilter dirFilter = new IOFileFilter() {
      @Override
      public boolean accept(File arg0, String arg1) {
        return false;
      }

      @Override
      public boolean accept(File arg0) {
        // all common metadata folders for additional inclusion
        return arg0.getName().startsWith("metadata") && arg0.getName().contains("common");
      }
    };
    IOFileFilter fileFilter = new IOFileFilter() {
      @Override
      public boolean accept(File pathname) {
        // all XML files in scraper folder - but not the addon.xml itself
        return pathname.getName().endsWith("xml") && !pathname.getName().equals("addon.xml");
      }

      @Override
      public boolean accept(File arg0, String arg1) {
        return false;
      }
    };
    for (XbmcScraper sc : getAllCommon()) {
      Collection<File> files = FileUtils.listFiles(sc.getFolder(), fileFilter, dirFilter);
      for (File f : files) {
        if (!common.contains(f)) {
          System.out.println("Found common: " + f);
          common.add(f);
        }
        else {
          System.out.println("Skipped common: " + f);
        }
      }
    }
    if (common.isEmpty()) {
      System.out.println("Meh - could not find any common function...");
    }
    return common;
  }
}
|
|
package mediabrowser.model.dlna;
import mediabrowser.model.dto.*;
import mediabrowser.model.entities.*;
import mediabrowser.model.extensions.*;
import mediabrowser.model.logging.*;
import mediabrowser.model.mediainfo.*;
import mediabrowser.model.session.*;
public class StreamBuilder
{
// Used to probe local file / URL accessibility when deciding direct-play eligibility.
private ILocalPlayer _localPlayer;
// Logger for playback-decision diagnostics.
private ILogger _logger;
/**
 * Creates a stream builder that consults the given local player when checking
 * whether a media path is directly accessible.
 */
public StreamBuilder(ILocalPlayer localPlayer, ILogger logger)
{
_localPlayer = localPlayer;
_logger = logger;
}
/**
 * Convenience constructor using a NullLocalPlayer — presumably a player stub
 * whose access checks always deny/no-op (TODO confirm NullLocalPlayer semantics).
 */
public StreamBuilder(ILogger logger)
{
this(new NullLocalPlayer(), logger);
}
/**
 * Builds the optimal audio playback plan from the given options: one candidate
 * StreamInfo is built per matching media source, tagged with the device, and
 * the best-ranked candidate is returned (null when none is playable).
 */
public final StreamInfo BuildAudioItem(AudioOptions options)
{
    // Fail fast on malformed options before doing any work.
    ValidateAudioInput(options);

    // Restrict to the requested media source id, or consider every source when none was given.
    String requestedId = options.getMediaSourceId();
    java.util.ArrayList<MediaSourceInfo> candidateSources = new java.util.ArrayList<MediaSourceInfo>();
    for (MediaSourceInfo source : options.getMediaSources())
    {
        if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(requestedId) || StringHelper.EqualsIgnoreCase(source.getId(), requestedId))
        {
            candidateSources.add(source);
        }
    }

    // Build a candidate stream per source, dropping sources that yield none.
    java.util.ArrayList<StreamInfo> candidateStreams = new java.util.ArrayList<StreamInfo>();
    for (MediaSourceInfo source : candidateSources)
    {
        StreamInfo candidate = BuildAudioItem(source, options);
        if (candidate != null)
        {
            candidateStreams.add(candidate);
        }
    }

    // Tag every candidate with the requesting device before ranking.
    for (StreamInfo candidate : candidateStreams)
    {
        candidate.setDeviceId(options.getDeviceId());
        candidate.setDeviceProfileId(options.getProfile().getId());
    }
    return GetOptimalStream(candidateStreams);
}
/**
 * Builds the optimal video playback plan from the given options: one candidate
 * StreamInfo is built per matching media source, tagged with the device, and
 * the best-ranked candidate is returned (null when none is playable).
 */
public final StreamInfo BuildVideoItem(VideoOptions options)
{
    // Fail fast on malformed options before doing any work.
    ValidateInput(options);

    // Restrict to the requested media source id, or consider every source when none was given.
    String requestedId = options.getMediaSourceId();
    java.util.ArrayList<MediaSourceInfo> candidateSources = new java.util.ArrayList<MediaSourceInfo>();
    for (MediaSourceInfo source : options.getMediaSources())
    {
        if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(requestedId) || StringHelper.EqualsIgnoreCase(source.getId(), requestedId))
        {
            candidateSources.add(source);
        }
    }

    // Build a candidate stream per source, dropping sources that yield none.
    java.util.ArrayList<StreamInfo> candidateStreams = new java.util.ArrayList<StreamInfo>();
    for (MediaSourceInfo source : candidateSources)
    {
        StreamInfo candidate = BuildVideoItem(source, options);
        if (candidate != null)
        {
            candidateStreams.add(candidate);
        }
    }

    // Tag every candidate with the requesting device before ranking.
    for (StreamInfo candidate : candidateStreams)
    {
        candidate.setDeviceId(options.getDeviceId());
        candidate.setDeviceProfileId(options.getProfile().getId());
    }
    return GetOptimalStream(candidateStreams);
}
/**
 * Ranks the candidate streams and returns the best one, or null when the
 * candidate list is empty.
 */
private StreamInfo GetOptimalStream(java.util.ArrayList<StreamInfo> streams)
{
    java.util.ArrayList<StreamInfo> ranked = StreamInfoSorter.SortMediaSources(streams);
    return ranked.isEmpty() ? null : ranked.get(0);
}
/**
 * Builds a playback plan for a single audio media source, choosing DirectPlay,
 * DirectStream, or Transcode based on the device profile. Returns null only
 * when a transcoding profile matched but the source does not support
 * transcoding; otherwise a StreamInfo is always returned (possibly with no
 * play method set when neither direct methods nor a transcoding profile apply).
 */
private StreamInfo BuildAudioItem(MediaSourceInfo item, AudioOptions options)
{
// Seed the result with the source/context metadata; the play method is decided below.
StreamInfo tempVar = new StreamInfo();
tempVar.setItemId(options.getItemId());
tempVar.setMediaType(DlnaProfileType.Audio);
tempVar.setMediaSource(item);
tempVar.setRunTimeTicks(item.getRunTimeTicks());
tempVar.setContext(options.getContext());
tempVar.setDeviceProfile(options.getProfile());
StreamInfo playlistItem = tempVar;
MediaStream audioStream = item.GetDefaultAudioStream(null);
// Which direct methods (DirectPlay/DirectStream) the profile and bitrate allow.
java.util.ArrayList<PlayMethod> directPlayMethods = GetAudioDirectPlayMethods(item, audioStream, options);
if (directPlayMethods.size() > 0)
{
String audioCodec = audioStream == null ? null : audioStream.getCodec();
// Make sure audio codec profiles are satisfied
if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(audioCodec))
{
ConditionProcessor conditionProcessor = new ConditionProcessor();
// Gather every condition from audio codec profiles covering this codec/container.
java.util.ArrayList<ProfileCondition> conditions = new java.util.ArrayList<ProfileCondition>();
for (CodecProfile i : options.getProfile().getCodecProfiles())
{
if (i.getType() == CodecType.Audio && i.ContainsCodec(audioCodec, item.getContainer()))
{
for (ProfileCondition c : i.getConditions())
{
conditions.add(c);
}
}
}
// audioStream is non-null here: audioCodec was read from it above.
Integer audioChannels = audioStream.getChannels();
Integer audioBitrate = audioStream.getBitRate();
boolean all = true;
for (ProfileCondition c : conditions)
{
if (!conditionProcessor.IsAudioConditionSatisfied(c, audioChannels, audioBitrate))
{
all = false;
break;
}
}
if (all)
{
// Prefer DirectPlay when the local player can reach the file/url itself;
// otherwise fall back to DirectStream if it was allowed.
if (item.getProtocol() == MediaProtocol.File && directPlayMethods.contains(PlayMethod.DirectPlay) && _localPlayer.CanAccessFile(item.getPath()))
{
playlistItem.setPlayMethod(PlayMethod.DirectPlay);
}
else if (item.getProtocol() == MediaProtocol.Http && directPlayMethods.contains(PlayMethod.DirectPlay) && _localPlayer.CanAccessUrl(item.getPath(), item.getRequiredHttpHeaders().size() > 0))
{
playlistItem.setPlayMethod(PlayMethod.DirectPlay);
}
else if (directPlayMethods.contains(PlayMethod.DirectStream))
{
playlistItem.setPlayMethod(PlayMethod.DirectStream);
}
playlistItem.setContainer(item.getContainer());
return playlistItem;
}
}
}
// Direct methods not usable; find the first audio transcoding profile matching
// this media type and encoding context.
TranscodingProfile transcodingProfile = null;
for (TranscodingProfile i : options.getProfile().getTranscodingProfiles())
{
if (i.getType() == playlistItem.getMediaType() && i.getContext() == options.getContext())
{
transcodingProfile = i;
break;
}
}
if (transcodingProfile != null)
{
if (!item.getSupportsTranscoding())
{
// Cannot direct play and cannot transcode: no playable stream for this source.
return null;
}
playlistItem.setPlayMethod(PlayMethod.Transcode);
playlistItem.setTranscodeSeekInfo(transcodingProfile.getTranscodeSeekInfo());
playlistItem.setEstimateContentLength(transcodingProfile.getEstimateContentLength());
playlistItem.setContainer(transcodingProfile.getContainer());
playlistItem.setAudioCodec(transcodingProfile.getAudioCodec());
playlistItem.setSubProtocol(transcodingProfile.getProtocol());
// Only the first matching codec profile is collected (loop breaks after one match).
java.util.ArrayList<CodecProfile> audioCodecProfiles = new java.util.ArrayList<CodecProfile>();
for (CodecProfile i : options.getProfile().getCodecProfiles())
{
if (i.getType() == CodecType.Audio && i.ContainsCodec(transcodingProfile.getAudioCodec(), transcodingProfile.getContainer()))
{
audioCodecProfiles.add(i);
}
if (audioCodecProfiles.size() >= 1)
{
break;
}
}
java.util.ArrayList<ProfileCondition> audioTranscodingConditions = new java.util.ArrayList<ProfileCondition>();
for (CodecProfile i : audioCodecProfiles)
{
for (ProfileCondition c : i.getConditions())
{
audioTranscodingConditions.add(c);
}
}
ApplyTranscodingConditions(playlistItem, audioTranscodingConditions);
// Honor requested max channels
if (options.getMaxAudioChannels() != null)
{
Integer tempVar2 = playlistItem.getMaxAudioChannels();
int currentValue = (tempVar2 != null) ? tempVar2 : options.getMaxAudioChannels();
playlistItem.setMaxAudioChannels(Math.min(options.getMaxAudioChannels(), currentValue));
}
// Bitrate fallback chain (right-associative ternaries): explicit option ->
// profile sync/streaming bitrate by context -> 128000; then the lower of this
// and any bitrate already set on the playlist item wins.
Integer tempVar3 = options.getAudioTranscodingBitrate();
int configuredBitrate = (tempVar3 != null) ? tempVar3 : ((options.getContext() == EncodingContext.Static ? options.getProfile().getMusicSyncBitrate() : options.getProfile().getMusicStreamingTranscodingBitrate()) != null) ? (options.getContext() == EncodingContext.Static ? options.getProfile().getMusicSyncBitrate() : options.getProfile().getMusicStreamingTranscodingBitrate()) : 128000;
Integer tempVar4 = playlistItem.getAudioBitrate();
playlistItem.setAudioBitrate(Math.min(configuredBitrate, (tempVar4 != null) ? tempVar4 : configuredBitrate));
}
return playlistItem;
}
/**
 * Bitrate limit used when checking direct-play eligibility: local files are
 * capped by the profile's static bitrate, everything else by the options' max.
 */
private Integer GetBitrateForDirectPlayCheck(MediaSourceInfo item, AudioOptions options)
{
    boolean isLocalFile = item.getProtocol() == MediaProtocol.File;
    return isLocalFile ? options.getProfile().getMaxStaticBitrate() : options.GetMaxBitrate();
}
/**
 * Determines which direct methods (DirectStream and/or DirectPlay) the device
 * profile and bitrate limits allow for the given audio source. Returns an
 * empty list when no direct-play profile matches.
 */
private java.util.ArrayList<PlayMethod> GetAudioDirectPlayMethods(MediaSourceInfo item, MediaStream audioStream, AudioOptions options)
{
    // Find the first audio direct-play profile that claims support for this source.
    DirectPlayProfile matchedProfile = null;
    for (DirectPlayProfile candidate : options.getProfile().getDirectPlayProfiles())
    {
        if (candidate.getType() == DlnaProfileType.Audio && IsAudioDirectPlaySupported(candidate, item, audioStream))
        {
            matchedProfile = candidate;
            break;
        }
    }

    java.util.ArrayList<PlayMethod> playMethods = new java.util.ArrayList<PlayMethod>();
    if (matchedProfile == null)
    {
        // Nothing in the device profile supports this source directly.
        return playMethods;
    }

    // Direct stream is additionally constrained by current network conditions (options.GetMaxBitrate()).
    if (item.getSupportsDirectStream() && IsAudioEligibleForDirectPlay(item, options.GetMaxBitrate()))
    {
        playMethods.add(PlayMethod.DirectStream);
    }
    // Direct play is constrained only by what the device itself can handle.
    if (item.getSupportsDirectPlay() && IsAudioEligibleForDirectPlay(item, GetBitrateForDirectPlayCheck(item, options)))
    {
        playMethods.add(PlayMethod.DirectPlay);
    }
    return playMethods;
}
/**
 * Picks a default subtitle stream index: among the highest-scored subtitle
 * streams, prefers one deliverable as an external file per the given profiles;
 * otherwise falls back to the source's own default subtitle stream index.
 */
private Integer GetDefaultSubtitleStreamIndex(MediaSourceInfo item, SubtitleProfile[] subtitleProfiles)
{
    // Find the best score among scored subtitle streams (-1 when none are scored).
    int bestScore = -1;
    for (MediaStream candidate : item.getMediaStreams())
    {
        if (candidate.getType() != MediaStreamType.Subtitle || candidate.getScore() == null)
        {
            continue;
        }
        if (candidate.getScore() > bestScore)
        {
            bestScore = candidate.getScore();
        }
    }

    // Collect every subtitle stream tied for that best score.
    java.util.ArrayList<MediaStream> bestStreams = new java.util.ArrayList<MediaStream>();
    for (MediaStream candidate : item.getMediaStreams())
    {
        if (candidate.getType() == MediaStreamType.Subtitle && candidate.getScore() != null && candidate.getScore() == bestScore)
        {
            bestStreams.add(candidate);
        }
    }

    // If multiple streams have an equal score, pick the most efficient one:
    // a format the device can take as an external subtitle file.
    if (bestStreams.size() > 1)
    {
        for (MediaStream candidate : bestStreams)
        {
            for (SubtitleProfile profile : subtitleProfiles)
            {
                if (profile.getMethod() == SubtitleDeliveryMethod.External && StringHelper.EqualsIgnoreCase(profile.getFormat(), candidate.getCodec()))
                {
                    return candidate.getIndex();
                }
            }
        }
    }
    // No optimization panned out; use the source's original default.
    return item.getDefaultSubtitleStreamIndex();
}
private StreamInfo BuildVideoItem(MediaSourceInfo item, VideoOptions options)
{
StreamInfo tempVar = new StreamInfo();
tempVar.setItemId(options.getItemId());
tempVar.setMediaType(DlnaProfileType.Video);
tempVar.setMediaSource(item);
tempVar.setRunTimeTicks(item.getRunTimeTicks());
tempVar.setContext(options.getContext());
tempVar.setDeviceProfile(options.getProfile());
StreamInfo playlistItem = tempVar;
Integer tempVar2 = options.getSubtitleStreamIndex();
playlistItem.setSubtitleStreamIndex((tempVar2 != null) ? tempVar2 : GetDefaultSubtitleStreamIndex(item, options.getProfile().getSubtitleProfiles()));
MediaStream subtitleStream = playlistItem.getSubtitleStreamIndex() != null ? item.GetMediaStream(MediaStreamType.Subtitle, playlistItem.getSubtitleStreamIndex()) : null;
Integer tempVar3 = options.getAudioStreamIndex();
MediaStream audioStream = item.GetDefaultAudioStream((tempVar3 != null) ? tempVar3 : item.getDefaultAudioStreamIndex());
Integer audioStreamIndex = null;
if (audioStream != null)
{
audioStreamIndex = audioStream.getIndex();
}
MediaStream videoStream = item.getVideoStream();
// TODO: This doesn't accout for situation of device being able to handle media bitrate, but wifi connection not fast enough
boolean isEligibleForDirectPlay = IsEligibleForDirectPlay(item, GetBitrateForDirectPlayCheck(item, options), subtitleStream, options);
boolean isEligibleForDirectStream = IsEligibleForDirectPlay(item, options.GetMaxBitrate(), subtitleStream, options);
String tempVar4 = options.getProfile().getName();
String tempVar5 = item.getPath();
_logger.Debug("Profile: {0}, Path: {1}, isEligibleForDirectPlay: {2}, isEligibleForDirectStream: {3}", (tempVar4 != null) ? tempVar4 : "Unknown Profile", (tempVar5 != null) ? tempVar5 : "Unknown path", isEligibleForDirectPlay, isEligibleForDirectStream);
if (isEligibleForDirectPlay || isEligibleForDirectStream)
{
// See if it can be direct played
PlayMethod directPlay = GetVideoDirectPlayProfile(options.getProfile(), item, videoStream, audioStream, isEligibleForDirectPlay, isEligibleForDirectStream);
if (directPlay != null)
{
playlistItem.setPlayMethod(directPlay);
playlistItem.setContainer(item.getContainer());
if (subtitleStream != null)
{
SubtitleProfile subtitleProfile = GetSubtitleProfile(subtitleStream, options.getProfile().getSubtitleProfiles(), options.getContext());
playlistItem.setSubtitleDeliveryMethod(subtitleProfile.getMethod());
playlistItem.setSubtitleFormat(subtitleProfile.getFormat());
}
return playlistItem;
}
}
// Can't direct play, find the transcoding profile
TranscodingProfile transcodingProfile = null;
for (TranscodingProfile i : options.getProfile().getTranscodingProfiles())
{
if (i.getType() == playlistItem.getMediaType() && i.getContext() == options.getContext())
{
transcodingProfile = i;
break;
}
}
if (transcodingProfile != null)
{
if (!item.getSupportsTranscoding())
{
return null;
}
if (subtitleStream != null)
{
SubtitleProfile subtitleProfile = GetSubtitleProfile(subtitleStream, options.getProfile().getSubtitleProfiles(), options.getContext());
playlistItem.setSubtitleDeliveryMethod(subtitleProfile.getMethod());
playlistItem.setSubtitleFormat(subtitleProfile.getFormat());
}
playlistItem.setPlayMethod(PlayMethod.Transcode);
playlistItem.setContainer(transcodingProfile.getContainer());
playlistItem.setEstimateContentLength(transcodingProfile.getEstimateContentLength());
playlistItem.setTranscodeSeekInfo(transcodingProfile.getTranscodeSeekInfo());
playlistItem.setAudioCodec(transcodingProfile.getAudioCodec().split("[,]", -1)[0]);
playlistItem.setVideoCodec(transcodingProfile.getVideoCodec());
playlistItem.setSubProtocol(transcodingProfile.getProtocol());
playlistItem.setAudioStreamIndex(audioStreamIndex);
java.util.ArrayList<ProfileCondition> videoTranscodingConditions = new java.util.ArrayList<ProfileCondition>();
for (CodecProfile i : options.getProfile().getCodecProfiles())
{
if (i.getType() == CodecType.Video && i.ContainsCodec(transcodingProfile.getVideoCodec(), transcodingProfile.getContainer()))
{
for (ProfileCondition c : i.getConditions())
{
videoTranscodingConditions.add(c);
}
break;
}
}
ApplyTranscodingConditions(playlistItem, videoTranscodingConditions);
java.util.ArrayList<ProfileCondition> audioTranscodingConditions = new java.util.ArrayList<ProfileCondition>();
for (CodecProfile i : options.getProfile().getCodecProfiles())
{
if (i.getType() == CodecType.VideoAudio && i.ContainsCodec(transcodingProfile.getAudioCodec(), transcodingProfile.getContainer()))
{
for (ProfileCondition c : i.getConditions())
{
audioTranscodingConditions.add(c);
}
break;
}
}
ApplyTranscodingConditions(playlistItem, audioTranscodingConditions);
// Honor requested max channels
if (options.getMaxAudioChannels() != null)
{
Integer tempVar6 = playlistItem.getMaxAudioChannels();
int currentValue = (tempVar6 != null) ? tempVar6 : options.getMaxAudioChannels();
playlistItem.setMaxAudioChannels(Math.min(options.getMaxAudioChannels(), currentValue));
}
int audioBitrate = GetAudioBitrate(options.GetMaxBitrate(), playlistItem.getTargetAudioChannels(), playlistItem.getTargetAudioCodec(), audioStream);
Integer tempVar7 = playlistItem.getAudioBitrate();
playlistItem.setAudioBitrate(Math.min((tempVar7 != null) ? tempVar7 : audioBitrate, audioBitrate));
Integer maxBitrateSetting = options.GetMaxBitrate();
// Honor max rate
if (maxBitrateSetting != null)
{
int videoBitrate = maxBitrateSetting;
if (playlistItem.getAudioBitrate() != null)
{
videoBitrate -= playlistItem.getAudioBitrate();
}
// Make sure the video bitrate is lower than bitrate settings but at least 64k
Integer tempVar8 = playlistItem.getVideoBitrate();
int currentValue = (tempVar8 != null) ? tempVar8 : videoBitrate;
playlistItem.setVideoBitrate(Math.max(Math.min(videoBitrate, currentValue), 64000));
}
}
return playlistItem;
}
/**
 * Picks a target audio bitrate for transcoding.
 *
 * Defaults to 128 kbps stereo and bumps to 320 kbps for 5.1+ channel targets when the
 * overall bitrate budget allows it, then clamps to a known-safe encoder ceiling.
 * NOTE(review): targetAudioCodec is currently unused — kept for interface compatibility.
 */
private int GetAudioBitrate(Integer maxTotalBitrate, Integer targetAudioChannels, String targetAudioCodec, MediaStream audioStream)
{
    int chosenBitrate = 128000;
    int totalBudget = (maxTotalBitrate != null) ? maxTotalBitrate : 0;
    if (targetAudioChannels != null && targetAudioChannels >= 5 && totalBudget >= 2000000)
    {
        chosenBitrate = 320000;
    }
    // Seeing webm encoding failures when source has 1 audio channel and 22k bitrate.
    // Any attempts to transcode over 64k will fail
    int encoderCeiling = Integer.MAX_VALUE;
    if (audioStream != null && audioStream.getChannels() != null && audioStream.getChannels() == 1)
    {
        Integer sourceBitrate = audioStream.getBitRate();
        if (((sourceBitrate != null) ? sourceBitrate : 0) < 64000)
        {
            encoderCeiling = 64000;
        }
    }
    return Math.min(chosenBitrate, encoderCeiling);
}
/**
 * Decides whether a video media source can be played without transcoding under the
 * given device profile.
 *
 * Checks, in order: a matching direct-play profile, container-level profile
 * conditions, video codec conditions, and (when an audio stream is present) audio
 * codec conditions. If all pass, returns PlayMethod.DirectPlay when eligible and
 * the player can access the source, else PlayMethod.DirectStream when eligible,
 * else null (caller falls back to transcoding).
 */
private PlayMethod GetVideoDirectPlayProfile(DeviceProfile profile, MediaSourceInfo mediaSource, MediaStream videoStream, MediaStream audioStream, boolean isEligibleForDirectPlay, boolean isEligibleForDirectStream)
{
    // See if it can be direct played
    DirectPlayProfile directPlay = null;
    for (DirectPlayProfile i : profile.getDirectPlayProfiles())
    {
        if (i.getType() == DlnaProfileType.Video && IsVideoDirectPlaySupported(i, mediaSource, videoStream, audioStream))
        {
            directPlay = i;
            break;
        }
    }
    if (directPlay == null)
    {
        String tempVar = profile.getName();
        String tempVar2 = mediaSource.getPath();
        _logger.Debug("Profile: {0}, No direct play profiles found for Path: {1}", (tempVar != null) ? tempVar : "Unknown Profile", (tempVar2 != null) ? tempVar2 : "Unknown path");
        return null;
    }
    String container = mediaSource.getContainer();
    // Collect container-level conditions that apply to this media container.
    java.util.ArrayList<ProfileCondition> conditions = new java.util.ArrayList<ProfileCondition>();
    for (ContainerProfile i : profile.getContainerProfiles())
    {
        if (i.getType() == DlnaProfileType.Video && ListHelper.ContainsIgnoreCase(i.GetContainers(), container))
        {
            for (ProfileCondition c : i.getConditions())
            {
                conditions.add(c);
            }
        }
    }
    ConditionProcessor conditionProcessor = new ConditionProcessor();
    // Stream attributes; all null-safe since the source may lack a video or audio stream.
    Integer width = videoStream == null ? null : videoStream.getWidth();
    Integer height = videoStream == null ? null : videoStream.getHeight();
    Integer bitDepth = videoStream == null ? null : videoStream.getBitDepth();
    Integer videoBitrate = videoStream == null ? null : videoStream.getBitRate();
    Double videoLevel = videoStream == null ? null : videoStream.getLevel();
    String videoProfile = videoStream == null ? null : videoStream.getProfile();
    // FIX: the original read videoStream.getAverageFrameRate() into a temp BEFORE the
    // null check, throwing a NullPointerException for sources without a video stream
    // (the ternary was also a no-op, calling the same getter on both branches).
    // Guard it like every other videoStream field above.
    Float videoFramerate = videoStream == null ? null : videoStream.getAverageFrameRate();
    Boolean isAnamorphic = videoStream == null ? null : videoStream.getIsAnamorphic();
    Boolean isCabac = videoStream == null ? null : videoStream.getIsCabac();
    Integer audioBitrate = audioStream == null ? null : audioStream.getBitRate();
    Integer audioChannels = audioStream == null ? null : audioStream.getChannels();
    String audioProfile = audioStream == null ? null : audioStream.getProfile();
    TransportStreamTimestamp timestamp = videoStream == null ? TransportStreamTimestamp.None : mediaSource.getTimestamp();
    Integer packetLength = videoStream == null ? null : videoStream.getPacketLength();
    Integer refFrames = videoStream == null ? null : videoStream.getRefFrames();
    Integer numAudioStreams = mediaSource.GetStreamCount(MediaStreamType.Audio);
    Integer numVideoStreams = mediaSource.GetStreamCount(MediaStreamType.Video);
    // Check container conditions
    for (ProfileCondition i : conditions)
    {
        if (!conditionProcessor.IsVideoConditionSatisfied(i, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isCabac, refFrames, numVideoStreams, numAudioStreams))
        {
            LogConditionFailure(profile, "VideoContainerProfile", i, mediaSource);
            return null;
        }
    }
    String videoCodec = videoStream == null ? null : videoStream.getCodec();
    if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(videoCodec))
    {
        String tempVar4 = profile.getName();
        String tempVar5 = mediaSource.getPath();
        _logger.Debug("Profile: {0}, DirectPlay=false. Reason=Unknown video codec. Path: {1}", (tempVar4 != null) ? tempVar4 : "Unknown Profile", (tempVar5 != null) ? tempVar5 : "Unknown path");
        return null;
    }
    // Check video codec conditions
    conditions = new java.util.ArrayList<ProfileCondition>();
    for (CodecProfile i : profile.getCodecProfiles())
    {
        if (i.getType() == CodecType.Video && i.ContainsCodec(videoCodec, container))
        {
            for (ProfileCondition c : i.getConditions())
            {
                conditions.add(c);
            }
        }
    }
    for (ProfileCondition i : conditions)
    {
        if (!conditionProcessor.IsVideoConditionSatisfied(i, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isCabac, refFrames, numVideoStreams, numAudioStreams))
        {
            LogConditionFailure(profile, "VideoCodecProfile", i, mediaSource);
            return null;
        }
    }
    if (audioStream != null)
    {
        String audioCodec = audioStream.getCodec();
        if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(audioCodec))
        {
            String tempVar6 = profile.getName();
            String tempVar7 = mediaSource.getPath();
            _logger.Debug("Profile: {0}, DirectPlay=false. Reason=Unknown audio codec. Path: {1}", (tempVar6 != null) ? tempVar6 : "Unknown Profile", (tempVar7 != null) ? tempVar7 : "Unknown path");
            return null;
        }
        conditions = new java.util.ArrayList<ProfileCondition>();
        for (CodecProfile i : profile.getCodecProfiles())
        {
            if (i.getType() == CodecType.VideoAudio && i.ContainsCodec(audioCodec, container))
            {
                for (ProfileCondition c : i.getConditions())
                {
                    conditions.add(c);
                }
            }
        }
        // Loop-invariant: audioStream is known non-null here, so compute once instead
        // of re-evaluating inside the condition loop as the original did.
        Boolean isSecondaryAudio = mediaSource.IsSecondaryAudio(audioStream);
        for (ProfileCondition i : conditions)
        {
            if (!conditionProcessor.IsVideoAudioConditionSatisfied(i, audioChannels, audioBitrate, audioProfile, isSecondaryAudio))
            {
                LogConditionFailure(profile, "VideoAudioCodecProfile", i, mediaSource);
                return null;
            }
        }
    }
    if (isEligibleForDirectPlay && mediaSource.getSupportsDirectPlay())
    {
        if (mediaSource.getProtocol() == MediaProtocol.Http)
        {
            if (_localPlayer.CanAccessUrl(mediaSource.getPath(), mediaSource.getRequiredHttpHeaders().size() > 0))
            {
                return PlayMethod.DirectPlay;
            }
        }
        else if (mediaSource.getProtocol() == MediaProtocol.File)
        {
            if (_localPlayer.CanAccessFile(mediaSource.getPath()))
            {
                return PlayMethod.DirectPlay;
            }
        }
    }
    if (isEligibleForDirectStream && mediaSource.getSupportsDirectStream())
    {
        return PlayMethod.DirectStream;
    }
    return null;
}
/**
 * Logs why a profile-condition check rejected direct play for the media source.
 */
private void LogConditionFailure(DeviceProfile profile, String type, ProfileCondition condition, MediaSourceInfo mediaSource)
{
    String profileName = profile.getName();
    String conditionValue = condition.getValue();
    String mediaPath = mediaSource.getPath();
    _logger.Debug("Profile: {0}, DirectPlay=false. Reason={1}.{2} Condition: {3}. ConditionValue: {4}. IsRequired: {5}. Path: {6}", type, (profileName != null) ? profileName : "Unknown Profile", condition.getProperty(), condition.getCondition(), (conditionValue != null) ? conditionValue : "", condition.getIsRequired(), (mediaPath != null) ? mediaPath : "Unknown path");
}
/**
 * A source is eligible for direct play when any selected subtitle track can be delivered
 * externally or embedded (i.e. without burning it in) and the source bitrate fits the cap.
 */
private boolean IsEligibleForDirectPlay(MediaSourceInfo item, Integer maxBitrate, MediaStream subtitleStream, VideoOptions options)
{
    if (subtitleStream != null)
    {
        SubtitleProfile subtitleProfile = GetSubtitleProfile(subtitleStream, options.getProfile().getSubtitleProfiles(), options.getContext());
        SubtitleDeliveryMethod method = subtitleProfile.getMethod();
        boolean deliverableWithoutTranscode = method == SubtitleDeliveryMethod.External || method == SubtitleDeliveryMethod.Embed;
        if (!deliverableWithoutTranscode)
        {
            return false;
        }
    }
    return IsAudioEligibleForDirectPlay(item, maxBitrate);
}
/**
 * Selects how a subtitle stream should be delivered for the given device profiles.
 *
 * Preference order: a language-compatible External profile matching the stream's
 * text/graphical kind, then a language-compatible Embed profile, and finally a
 * synthetic Encode (burn-in) profile using the stream's own codec.
 */
public static SubtitleProfile GetSubtitleProfile(MediaStream subtitleStream, SubtitleProfile[] subtitleProfiles, EncodingContext context)
{
    // Look for an external profile that matches the stream type (text/graphical)
    for (SubtitleProfile profile : subtitleProfiles)
    {
        boolean requiresConversion = !StringHelper.EqualsIgnoreCase(subtitleStream.getCodec(), profile.getFormat());
        if (!profile.SupportsLanguage(subtitleStream.getLanguage()))
        {
            continue;
        }
        if (profile.getMethod() == SubtitleDeliveryMethod.External && subtitleStream.getIsTextSubtitleStream() == MediaStream.IsTextFormat(profile.getFormat()))
        {
            if (!requiresConversion)
            {
                return profile;
            }
            if (subtitleStream.getSupportsExternalStream())
            {
                return profile;
            }
            // For sync we can handle the longer extraction times
            if (context.getValue() == EncodingContext.Static.getValue() && subtitleStream.getIsTextSubtitleStream())
            {
                return profile;
            }
        }
    }
    // Fall back to an embedded profile. FIX: the original computed a requiresConversion
    // flag here and then returned the profile on BOTH branches, so the flag was dead
    // code — the profile is returned unconditionally once method/kind/language match.
    for (SubtitleProfile profile : subtitleProfiles)
    {
        if (!profile.SupportsLanguage(subtitleStream.getLanguage()))
        {
            continue;
        }
        if (profile.getMethod() == SubtitleDeliveryMethod.Embed && subtitleStream.getIsTextSubtitleStream() == MediaStream.IsTextFormat(profile.getFormat()))
        {
            return profile;
        }
    }
    // No matching profile: burn the subtitles in during transcode.
    SubtitleProfile encoded = new SubtitleProfile();
    encoded.setMethod(SubtitleDeliveryMethod.Encode);
    encoded.setFormat(subtitleStream.getCodec());
    return encoded;
}
/**
 * Honors the max bitrate setting: with no cap everything passes; otherwise the
 * source must report a bitrate and it must not exceed the cap.
 */
private boolean IsAudioEligibleForDirectPlay(MediaSourceInfo item, Integer maxBitrate)
{
    if (maxBitrate == null)
    {
        return true;
    }
    Integer itemBitrate = item.getBitrate();
    return itemBitrate != null && itemBitrate <= maxBitrate;
}
/**
 * Validates video-specific options on top of the common audio validation.
 * Selecting a specific audio or subtitle stream requires a MediaSourceId.
 *
 * @throws IllegalArgumentException when a required option is missing
 */
private void ValidateInput(VideoOptions options)
{
    ValidateAudioInput(options);
    boolean hasMediaSourceId = !tangible.DotNetToJavaStringHelper.isNullOrEmpty(options.getMediaSourceId());
    if (!hasMediaSourceId && options.getAudioStreamIndex() != null)
    {
        throw new IllegalArgumentException("MediaSourceId is required when a specific audio stream is requested");
    }
    if (!hasMediaSourceId && options.getSubtitleStreamIndex() != null)
    {
        throw new IllegalArgumentException("MediaSourceId is required when a specific subtitle stream is requested");
    }
}
/**
 * Validates the options common to audio and video playback requests.
 * Checks are performed in a fixed order, so the first missing field determines
 * which exception message the caller sees.
 *
 * @throws IllegalArgumentException when ItemId, DeviceId, Profile or MediaSources is missing
 */
private void ValidateAudioInput(AudioOptions options)
{
if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(options.getItemId()))
{
throw new IllegalArgumentException("ItemId is required");
}
if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(options.getDeviceId()))
{
throw new IllegalArgumentException("DeviceId is required");
}
if (options.getProfile() == null)
{
throw new IllegalArgumentException("Profile is required");
}
if (options.getMediaSources() == null)
{
throw new IllegalArgumentException("MediaSources is required");
}
}
/**
 * Applies codec-profile conditions as transcoding limits on the stream info.
 *
 * Each condition's value is parsed culture-invariantly and, when parseable, stored
 * as the corresponding max/target on {@code item}. Conditions with empty values or
 * the GreaterThanEqual comparison are skipped. Unknown properties throw.
 *
 * Refactored: the original repeated the same RefObject try-parse boilerplate in
 * nine switch cases; that is now factored into the tryParseCondition* helpers
 * below with identical semantics (failed parses leave the item untouched).
 */
private void ApplyTranscodingConditions(StreamInfo item, Iterable<ProfileCondition> conditions)
{
    for (ProfileCondition condition : conditions)
    {
        String value = condition.getValue();
        if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(value))
        {
            continue;
        }
        // No way to express this
        if (condition.getCondition() == ProfileConditionType.GreaterThanEqual)
        {
            continue;
        }
        switch (condition.getProperty())
        {
            case AudioBitrate:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setAudioBitrate(num);
                }
                break;
            }
            case AudioChannels:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setMaxAudioChannels(num);
                }
                break;
            }
            case IsCabac:
            {
                Boolean val = tryParseConditionBool(value);
                if (val != null)
                {
                    if (condition.getCondition() == ProfileConditionType.Equals)
                    {
                        item.setCabac(val);
                    }
                    else if (condition.getCondition() == ProfileConditionType.NotEquals)
                    {
                        item.setCabac(!val);
                    }
                }
                break;
            }
            case IsAnamorphic:
            case AudioProfile:
            case Has64BitOffsets:
            case PacketLength:
            case NumAudioStreams:
            case NumVideoStreams:
            case IsSecondaryAudio:
            case VideoTimestamp:
            {
                // Not supported yet
                break;
            }
            case RefFrames:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setMaxRefFrames(num);
                }
                break;
            }
            case VideoBitDepth:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setMaxVideoBitDepth(num);
                }
                break;
            }
            case VideoProfile:
            {
                // Multiple allowed profiles are pipe-separated; use the first one.
                item.setVideoProfile(((value != null) ? value : "").split("[|]", -1)[0]);
                break;
            }
            case Height:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setMaxHeight(num);
                }
                break;
            }
            case VideoBitrate:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setVideoBitrate(num);
                }
                break;
            }
            case VideoFramerate:
            {
                Float num = tryParseConditionFloat(value);
                if (num != null)
                {
                    item.setMaxFramerate(num);
                }
                break;
            }
            case VideoLevel:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setVideoLevel(num);
                }
                break;
            }
            case Width:
            {
                Integer num = tryParseConditionInt(value);
                if (num != null)
                {
                    item.setMaxWidth(num);
                }
                break;
            }
            default:
                throw new IllegalArgumentException("Unrecognized ProfileConditionValue");
        }
    }
}
// Parses an int culture-invariantly; returns null when the value is not parseable.
private Integer tryParseConditionInt(String value)
{
    tangible.RefObject<Integer> ref = new tangible.RefObject<Integer>(0);
    return IntHelper.TryParseCultureInvariant(value, ref) ? ref.argValue : null;
}
// Parses a float culture-invariantly; returns null when the value is not parseable.
private Float tryParseConditionFloat(String value)
{
    tangible.RefObject<Float> ref = new tangible.RefObject<Float>(0F);
    return FloatHelper.TryParseCultureInvariant(value, ref) ? ref.argValue : null;
}
// Parses a boolean culture-invariantly; returns null when the value is not parseable.
private Boolean tryParseConditionBool(String value)
{
    tangible.RefObject<Boolean> ref = new tangible.RefObject<Boolean>(false);
    return BoolHelper.TryParseCultureInvariant(value, ref) ? ref.argValue : null;
}
/**
 * Checks whether the media container matches one of the profile's direct-play containers.
 * NOTE(review): despite the signature, only the container is validated here — the
 * audioStream argument is currently unused.
 */
private boolean IsAudioDirectPlaySupported(DirectPlayProfile profile, MediaSourceInfo item, MediaStream audioStream)
{
    if (profile.getContainer().length() == 0)
    {
        // Profile imposes no container restriction.
        return true;
    }
    String itemContainer = item.getContainer();
    String mediaContainer = (itemContainer != null) ? itemContainer : "";
    for (String candidate : profile.GetContainers())
    {
        if (StringHelper.EqualsIgnoreCase(candidate, mediaContainer))
        {
            return true;
        }
    }
    return false;
}
/**
 * Checks container, video codec and audio codec of the source against a direct-play
 * profile. Empty profile lists mean "no restriction" for that dimension.
 */
private boolean IsVideoDirectPlaySupported(DirectPlayProfile profile, MediaSourceInfo item, MediaStream videoStream, MediaStream audioStream)
{
    // Container check
    if (profile.getContainer().length() > 0)
    {
        String itemContainer = item.getContainer();
        String mediaContainer = (itemContainer != null) ? itemContainer : "";
        boolean containerMatches = false;
        for (String candidate : profile.GetContainers())
        {
            if (StringHelper.EqualsIgnoreCase(candidate, mediaContainer))
            {
                containerMatches = true;
                break;
            }
        }
        if (!containerMatches)
        {
            return false;
        }
    }
    // Video codec check: an unknown codec never direct-plays against a restricted list.
    java.util.ArrayList<String> allowedVideoCodecs = profile.GetVideoCodecs();
    if (!allowedVideoCodecs.isEmpty())
    {
        String videoCodec = (videoStream == null) ? null : videoStream.getCodec();
        if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(videoCodec) || !ListHelper.ContainsIgnoreCase(allowedVideoCodecs, videoCodec))
        {
            return false;
        }
    }
    // Audio codec check, same rule as above.
    java.util.ArrayList<String> allowedAudioCodecs = profile.GetAudioCodecs();
    if (!allowedAudioCodecs.isEmpty())
    {
        String audioCodec = (audioStream == null) ? null : audioStream.getCodec();
        if (tangible.DotNetToJavaStringHelper.isNullOrEmpty(audioCodec) || !ListHelper.ContainsIgnoreCase(allowedAudioCodecs, audioCodec))
        {
            return false;
        }
    }
    return true;
}
}
|
|
/*
* Licensed to ObjectStyle LLC under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ObjectStyle LLC licenses
* this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.bootique.jdbc.junit5;
import io.bootique.di.BQModule;
import io.bootique.di.Binder;
import io.bootique.di.Key;
import io.bootique.jdbc.junit5.connector.DbConnector;
import io.bootique.jdbc.junit5.connector.ExecStatementBuilder;
import io.bootique.jdbc.junit5.datasource.DataSourceHolder;
import io.bootique.jdbc.junit5.datasource.DriverDataSource;
import io.bootique.jdbc.junit5.init.DbInitializer;
import io.bootique.jdbc.junit5.metadata.DbMetadata;
import io.bootique.jdbc.junit5.script.SqlScriptRunner;
import io.bootique.jdbc.junit5.tester.DataManager;
import io.bootique.jdbc.junit5.tester.DataSourcePropertyBuilder;
import io.bootique.junit5.BQTestScope;
import io.bootique.junit5.scope.BQAfterScopeCallback;
import io.bootique.junit5.scope.BQBeforeMethodCallback;
import io.bootique.junit5.scope.BQBeforeScopeCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import javax.sql.DataSource;
import java.sql.Connection;
import java.util.Objects;
/**
* A JUnit 5 extension that manages a single test database. DbTester is declared in a unit test and handles database
* startup, schema and data initialization and shutdown. A single database controlled by DbTester can be used by
* one or more BQRuntimes. This class is abstract. Specific testers (such as DerbyTester or TestcontainersTester)
* are provided in separate modules.
*
* @since 2.0.M1
*/
public abstract class DbTester<SELF extends DbTester> implements BQBeforeScopeCallback, BQAfterScopeCallback, BQBeforeMethodCallback {
// Accumulates init steps (SQL scripts, init functions, Liquibase changelogs) to run after DataSource startup.
protected final DbInitializer initializer;
// Tables cleared before each test method, in INSERT-dependency order; null/empty disables cleanup.
protected String[] deleteTablesInInsertOrder;
// Lazily-initialized holder for the tester-managed DataSource; closed in afterScope.
protected final DataSourceHolder dataSourceHolder;
// Wrapper over the DataSource providing metadata and query builders; set in initConnector after DataSource init.
protected DbConnector connector;
public DbTester() {
this.dataSourceHolder = new DataSourceHolder();
this.initializer = new DbInitializer();
}
// Returns the tester-managed DataSource holder (the underlying DB may not be started until the JUnit scope begins).
public DataSource getDataSource() {
return dataSourceHolder;
}
// Returns the JDBC URL of the managed database, as reported by the DataSource holder.
public String getDbUrl() {
return dataSourceHolder.getDbUrl();
}
/**
 * Returns test DB "connector" that is a wrapper around DB DataSource providing access to DB metadata, and various
 * query builders. In most cases instead of DbConnector, you should use {@link Table} (via {@link #getTable(String)})
 * to work with the DB. Connector is only needed to run SQL directly.
 *
 * @since 2.0.B1
 */
public DbConnector getConnector() {
return Objects.requireNonNull(connector, "'connector' not initialized. Called outside of JUnit lifecycle?");
}
// Returns DB metadata via the connector; requires the JUnit lifecycle to have initialized the connector.
public DbMetadata getMetadata() {
return getConnector().getMetadata();
}
// Returns a Table accessor for the named DB table.
public Table getTable(String name) {
return getConnector().getTable(name);
}
// Returns a builder for executing an SQL statement against the test DB.
public ExecStatementBuilder execStatement() {
return getConnector().execStatement();
}
// Opens a connection to the test DB via the connector.
public Connection getConnection() {
return getConnector().getConnection();
}
// Configures a BQRuntime binder: registers this tester and the test DataSource properties.
protected void configure(Binder binder, String dataSourceName) {
bindSelf(binder, dataSourceName);
configureDataSource(binder, dataSourceName);
}
// Binds this tester instance in DI under the DataSource name so the runtime can locate it.
protected void bindSelf(Binder binder, String dataSourceName) {
binder.bind(Key.get(DbTester.class, dataSourceName)).toInstance(this);
}
// Marks the named DataSource configuration with the test-specific "bqjdbctest" type.
protected void configureDataSource(Binder binder, String dataSourceName) {
DataSourcePropertyBuilder.create(binder, dataSourceName).property("type", "bqjdbctest");
}
/**
 * Executes provided SQL script after the DB startup. The script would usually contain database schema and test
 * data. Assumes statements are separated with ";" character.
 *
 * @param initDBScript a location of the SQL script in Bootique {@link io.bootique.resource.ResourceFactory} format.
 * @return this tester
 */
public SELF initDB(String initDBScript) {
return initDB(initDBScript, null);
}
/**
 * Executes provided SQL script after the DB startup. The script would usually contain database schema and test
 * data.
 *
 * @param initDBScript a location of the SQL script in Bootique {@link io.bootique.resource.ResourceFactory} format.
 * @param delimiter SQL statements delimiter in the "initDBScript". An explicit delimiter may be useful when
 * the file contains common DB delimiters in the middle of stored procedure declarations, etc.
 * @return this tester
 */
public SELF initDB(String initDBScript, String delimiter) {
// NOTE: unchecked cast; safe as long as subclasses declare SELF as their own type.
initializer.addScript(initDBScript, delimiter);
return (SELF) this;
}
// Schedules a programmatic init function to run after DB startup.
public SELF initDB(JdbcOp initFunction) {
initializer.addFunction(initFunction);
return (SELF) this;
}
/**
 * @deprecated since 2.0.B1 in favor of {@link #initDBWithLiquibaseChangelog(String)}
 */
@Deprecated
public SELF runLiquibaseMigrations(String liquibaseChangeLog) {
return initDBWithLiquibaseChangelog(liquibaseChangeLog);
}
/**
 * @deprecated since 2.0.B1 in favor of {@link #initDBWithLiquibaseChangelog(String, String)}
 */
@Deprecated
public SELF runLiquibaseMigrations(String liquibaseChangeLog, String liquibaseContext) {
return initDBWithLiquibaseChangelog(liquibaseChangeLog, liquibaseContext);
}
/**
 * Schedules execution of a Liquibase changelog file after DB startup.
 *
 * @param changelog a location of the Liquibase changelog file in Bootique
 * {@link io.bootique.resource.ResourceFactory} format.
 * @return this tester
 * @since 2.0.B1
 */
public SELF initDBWithLiquibaseChangelog(String changelog) {
return initDBWithLiquibaseChangelog(changelog, null);
}
/**
 * Schedules execution of a Liquibase changelog file after DB startup.
 *
 * @param changelog a location of the Liquibase changelog file in Bootique
 * {@link io.bootique.resource.ResourceFactory} format.
 * @param liquibaseContext Liquibase context expression to filter migrations as appropriate for the test run.
 * @return this tester
 * @since 2.0.B1
 */
public SELF initDBWithLiquibaseChangelog(String changelog, String liquibaseContext) {
initializer.addLiquibase(changelog, liquibaseContext);
return (SELF) this;
}
/**
 * Executes provided SQL script. Assumes statements in the script are separated with ";" character.
 *
 * @param script a location of the SQL script in Bootique {@link io.bootique.resource.ResourceFactory} format.
 * @since 2.0.B1
 */
public void runScript(String script) {
runScript(script, null);
}
/**
 * Executes provided SQL script. The script would usually contain database schema and test
 * data.
 *
 * @param script a location of the SQL script in Bootique {@link io.bootique.resource.ResourceFactory} format.
 * @param delimiter Optional SQL statements delimiter in the "script". When null, a semicolon is assumed. An
 * explicit delimiter may be useful when the file contains common DB delimiters in the middle of
 * stored procedure declarations, etc.
 */
public void runScript(String script, String delimiter) {
Objects.requireNonNull(script, "Null 'script'");
new SqlScriptRunner(script).delimiter(delimiter).run(dataSourceHolder);
}
/**
 * Configures the Tester to delete data from the specified tables before each test.
 *
 * @param tablesInInsertOrder a list of table names in the order of INSERT dependencies between them.
 * @return this tester
 */
public SELF deleteBeforeEachTest(String... tablesInInsertOrder) {
this.deleteTablesInInsertOrder = tablesInInsertOrder;
return (SELF) this;
}
/**
 * Returns a Bootique module that can be used to configure a test DataSource in test {@link io.bootique.BQRuntime}.
 * This method can be used to initialize one or more BQRuntimes in a test class, so that they can share the database
 * managed by this tester.
 *
 * @param dataSourceName the name of the DataSource to create or replace in the target runtime
 * @return a new Bootique module with test DataSource configuration.
 */
public BQModule moduleWithTestDataSource(String dataSourceName) {
return binder -> configure(binder, dataSourceName);
}
// Subclasses (DerbyTester, TestcontainersTester, ...) supply the concrete non-pooling DataSource.
protected abstract DriverDataSource createNonPoolingDataSource(BQTestScope scope);
@Override
public void beforeScope(BQTestScope scope, ExtensionContext context) {
// By now the DataSource may already be initialized
// if BQRuntime using DbTester had some eager dependencies on DataSource
dataSourceHolder.initIfNeeded(() -> createNonPoolingDataSource(scope), this::afterDataSourceInit);
}
@Override
public void afterScope(BQTestScope scope, ExtensionContext context) {
// Shuts down the managed DataSource/database when the test scope ends.
dataSourceHolder.close();
}
@Override
public void beforeMethod(BQTestScope scope, ExtensionContext context) {
// Per-test cleanup: delete rows from the configured tables, if any were registered.
if (deleteTablesInInsertOrder != null && deleteTablesInInsertOrder.length > 0) {
new DataManager(getConnector(), deleteTablesInInsertOrder).deleteData();
}
}
// Runs once after the DataSource comes up: creates the connector, then executes init steps.
protected void afterDataSourceInit() {
initConnector();
initDB();
}
protected void initConnector() {
this.connector = new DbConnector(dataSourceHolder, DbMetadata.create(dataSourceHolder));
}
protected void initDB() {
// Executes all registered scripts/functions/Liquibase changelogs against the live DataSource.
initializer.exec(dataSourceHolder);
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.edu.learning.builtInServer;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.ui.AppIcon;
import com.jetbrains.edu.learning.StudySettings;
import com.jetbrains.edu.learning.courseFormat.Lesson;
import com.jetbrains.edu.learning.stepic.EduStepicAuthorizedClient;
import com.jetbrains.edu.learning.stepic.EduStepicConnector;
import com.jetbrains.edu.learning.stepic.StepicUser;
import com.jetbrains.edu.learning.stepic.StepicWrappers;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.ide.RestService;
import org.jetbrains.io.Responses;
import javax.swing.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.jetbrains.edu.learning.builtInServer.EduBuiltInServerUtils.*;
import static com.jetbrains.edu.learning.stepic.EduStepicNames.EDU_STEPIK_SERVICE_NAME;
import static com.jetbrains.edu.learning.stepic.EduStepicNames.LINK;
public class EduStepikRestService extends RestService {
private static final Logger LOG = Logger.getInstance(EduStepikRestService.class.getName());
// Matches "open course" requests of the form "/<service>?link=...".
private static final Pattern OPEN_COURSE_PATTERN = Pattern.compile("/" + EDU_STEPIK_SERVICE_NAME + "\\?link=.+");
// Extracts the lesson id (group 1) and step number (group 2) from a Stepik lesson URL.
private static final Pattern COURSE_PATTERN = Pattern.compile("https://stepik\\.org/lesson(?:/[a-zA-Z\\-]*-|/)(\\d+)/step/(\\d+)");
// Matches the OAuth redirect callback and captures the authorization code (group 1).
private static final Pattern
OAUTH_CODE_PATTERN = Pattern.compile("/" + RestService.PREFIX + "/" + EDU_STEPIK_SERVICE_NAME + "/oauth" + "\\?code=(\\w+)");
// Logs the message at INFO level and returns it, so callers can log and reply in one expression.
@NotNull
private static String log(@NotNull String message) {
LOG.info(message);
return message;
}
// This REST service is registered under the Stepik-specific endpoint name.
@NotNull
@Override
protected String getServiceName() {
return EDU_STEPIK_SERVICE_NAME;
}
// Only GET requests are handled by this service.
@Override
protected boolean isMethodSupported(@NotNull HttpMethod method) {
return method == HttpMethod.GET;
}
// NOTE(review): presumably permits request URLs without the standard RestService
// prefix — confirm against the RestService base-class contract.
@Override
protected boolean isPrefixlessAllowed() {
return true;
}
@Override
protected boolean isHostTrusted(@NotNull FullHttpRequest request) throws InterruptedException, InvocationTargetException {
    // OAuth redirect callbacks (GET carrying a ?code=... parameter) are trusted implicitly;
    // anything else falls through to the default trust check.
    boolean isOAuthCodeCallback = request.method() == HttpMethod.GET
        && OAUTH_CODE_PATTERN.matcher(request.uri()).matches();
    return isOAuthCodeCallback || super.isHostTrusted(request);
}
@Nullable
@Override
public String execute(@NotNull QueryStringDecoder urlDecoder, @NotNull FullHttpRequest request, @NotNull ChannelHandlerContext context)
throws IOException {
String uri = urlDecoder.uri();
LOG.info("Request: " + uri);
Matcher matcher = OPEN_COURSE_PATTERN.matcher(uri);
if (matcher.matches()) {
int courseId;
int stepId;
String link = getStringParameter(LINK, urlDecoder);
if (link == null) {
return log("The link parameter was not found");
}
LOG.info("Try to open a course: " + link);
QueryStringDecoder linkDecoder = new QueryStringDecoder(link);
matcher = COURSE_PATTERN.matcher(linkDecoder.path());
if (!matcher.matches()) {
return log("Unrecognized the link parameter");
}
int lessonId;
int stepIndex;
try {
lessonId = Integer.parseInt(matcher.group(1));
stepIndex = Integer.parseInt(matcher.group(2));
} catch (NumberFormatException e) {
return log("Unrecognized the link");
}
int unitId = getIntParameter("unit", linkDecoder);
if (unitId == -1) {
return log("Unrecognized the Unit id");
}
StepicWrappers.Unit unit = EduStepicConnector.getUnit(unitId);
if (unit.getId() == 0) {
return log("Unrecognized the Unit id");
}
StepicWrappers.Section section = EduStepicConnector.getSection(unit.getSection());
courseId = section.getCourse();
if (courseId == 0) {
return log("Unrecognized the course id");
}
Lesson lesson = EduStepicConnector.getLesson(lessonId);
List<Integer> stepIds = lesson.steps;
if (stepIds.isEmpty()) {
return log("Unrecognized the step id");
}
stepId = stepIds.get(stepIndex - 1);
LOG.info(String.format("Try to open a course: courseId=%s, stepId=%s", courseId, stepId));
if (focusOpenProject(courseId, stepId) || openRecentProject(courseId, stepId) || createProject(courseId, stepId)) {
RestService.sendOk(request, context);
LOG.info("Course opened: " + courseId);
return null;
}
RestService.sendStatus(HttpResponseStatus.NOT_FOUND, false, context.channel());
String message = "A project didn't found or created";
LOG.info(message);
return message;
}
Matcher codeMatcher = OAUTH_CODE_PATTERN.matcher(uri);
if (codeMatcher.matches()) {
String code = getStringParameter("code", urlDecoder);
if (code != null) {
StepicUser stepicUser = EduStepicAuthorizedClient.login(code, EduStepicConnector.getOAuthRedirectUrl());
if (stepicUser != null) {
StudySettings.getInstance().setUser(stepicUser);
sendHtmlResponse(request, context, "/oauthResponsePages/okPage.html");
showStepicNotification(NotificationType.INFORMATION,
"Logged in as " + stepicUser.getFirstName() + " " + stepicUser.getLastName());
focusOnApplicationWindow();
return null;
}
}
sendHtmlResponse(request, context, "/oauthResponsePages/errorPage.html");
showStepicNotification(NotificationType.ERROR, "Failed to log in");
return "Couldn't find code parameter for Stepik OAuth";
}
RestService.sendStatus(HttpResponseStatus.BAD_REQUEST, false, context.channel());
String message = "Unknown command: " + uri;
LOG.info(message);
return message;
}
private static void focusOnApplicationWindow() {
JFrame frame = WindowManager.getInstance().findVisibleFrame();
ApplicationManager.getApplication().invokeLater(() -> {
AppIcon.getInstance().requestFocus((IdeFrame)frame);
frame.toFront();
});
}
private void sendHtmlResponse(@NotNull HttpRequest request, @NotNull ChannelHandlerContext context, String pagePath) throws IOException {
BufferExposingByteArrayOutputStream byteOut = new BufferExposingByteArrayOutputStream();
InputStream pageTemplateStream = getClass().getResourceAsStream(pagePath);
String pageTemplate = StreamUtil.readText(pageTemplateStream, Charset.forName("UTF-8"));
try {
String pageWithProductName = pageTemplate.replaceAll("%IDE_NAME", ApplicationNamesInfo.getInstance().getFullProductName());
byteOut.write(StreamUtil.loadFromStream(new ByteArrayInputStream(pageWithProductName.getBytes(Charset.forName("UTF-8")))));
HttpResponse response = Responses.response("text/html", Unpooled.wrappedBuffer(byteOut.getInternalBuffer(), 0, byteOut.size()));
Responses.addNoCache(response);
response.headers().set("X-Frame-Options", "Deny");
Responses.send(response, context.channel(), request);
}
finally {
byteOut.close();
pageTemplateStream.close();
}
}
private static void showStepicNotification(@NotNull NotificationType notificationType, @NotNull String text) {
Notification notification = new Notification("Stepik", "Stepik", text, notificationType);
notification.notify(null);
}
}
|
|
/**
* Wegas
* http://wegas.albasim.ch
*
* Copyright (c) 2013-2021 School of Management and Engineering Vaud, Comem, MEI
* Licensed under the MIT License
*/
package com.wegas.core.rest;
import com.wegas.core.ejb.GameModelFacade;
import com.wegas.core.ejb.JCRFacade;
import com.wegas.core.ejb.RequestManager;
import com.wegas.core.exception.client.WegasErrorMessage;
import com.wegas.core.jcr.content.AbstractContentDescriptor;
import com.wegas.core.jcr.content.ContentConnector;
import com.wegas.core.jcr.content.ContentConnector.WorkspaceType;
import com.wegas.core.jcr.content.DescriptorFactory;
import com.wegas.core.jcr.content.FileDescriptor;
import com.wegas.core.jcr.jta.JCRConnectorProvider;
import com.wegas.core.persistence.game.GameModel;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipOutputStream;
import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* @author Cyril Junod (cyril.junod at gmail.com)
*/
@Stateless
@Path("GameModel/{gameModelId : ([1-9][0-9]*)?}/File")
public class FileController {
/**
*
*/
private static final Logger logger = LoggerFactory.getLogger(FileController.class);
/**
*
*/
@Inject
private GameModelFacade gmFacade;
@Inject
private JCRFacade jcrFacade;
@Inject
private RequestManager requestManager;
@Inject
private GameModelFacade gameModelFacade;
@Inject
private JCRConnectorProvider jCRConnectorProvider;
private ContentConnector getContentConnector(long gameModelId) throws RepositoryException {
// find the gameModel to check readRight
GameModel find = gameModelFacade.find(gameModelId);
return jCRConnectorProvider.getContentConnector(find, WorkspaceType.FILES);
}
@POST
@Path("mkdir{directory : .*?}")
public Response mkdir(@PathParam("gameModelId") Long gameModelId,
@PathParam("directory") String path) throws RepositoryException {
GameModel gameModel = gameModelFacade.find(gameModelId);
jcrFacade.assertPathWriteRight(gameModel, path);
jcrFacade.createDirectoryWithParents(gameModel, WorkspaceType.FILES, path);
//requestManager.assertUpdateRight(gameModel);
return Response.noContent().build();
}
/**
* @param gameModelId
* @param name
* @param note
* @param description
* @param path
* @param file
* @param details
* @param force override
*
* @return HTTP 200 if everything OK, 4xx otherwise
*
* @throws RepositoryException
*/
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
@Path("{force: (force/)?}upload{directory : .*?}")
public Response upload(@PathParam("gameModelId") Long gameModelId,
@FormDataParam("name") String oName,
@FormDataParam("note") String note,
@FormDataParam("description") String description,
@PathParam("directory") String path,
@FormDataParam("file") InputStream file,
@FormDataParam("file") FormDataBodyPart details,
@PathParam("force") String force) throws RepositoryException {
GameModel gameModel = gameModelFacade.find(gameModelId);
jcrFacade.assertPathWriteRight(gameModel, path);
logger.debug("File name: {}", details.getContentDisposition().getFileName());
final Boolean override = !force.equals("");
String name = oName;
if (name == null) {
byte[] bytes = details.getContentDisposition().getFileName().getBytes(StandardCharsets.ISO_8859_1);
name = new String(bytes, StandardCharsets.UTF_8);
}
AbstractContentDescriptor detachedFile;
//try {
if (details.getContentDisposition().getFileName() == null
|| details.getContentDisposition().getFileName().equals("")) {//Assuming an empty filename means a directory
detachedFile = jcrFacade.createDirectory(gameModel, WorkspaceType.FILES, name, path, note, description);
} else {
detachedFile = jcrFacade.createFile(gameModel, WorkspaceType.FILES, name, path, details.getMediaType().toString(),
note, description, file, override);
}
/*} catch (final WegasRuntimeException ex) {
Response.StatusType status = new Response.StatusType() {
@Override
public int getStatusCode() {
return 430;
}
@Override
public Response.Status.Family getFamily() {
return Response.Status.Family.CLIENT_ERROR;
}
@Override
public String getReasonPhrase() {
return ex.getLocalizedMessage();
}
};
return Response.status(status).build();
}
*/
return Response.ok(detachedFile, MediaType.APPLICATION_JSON).build();
}
/**
* @param gameModelId
* @param name
* @param request
* @param range partial content range
*
* @return the requested file with http 20x, 4xx if something went wrong
*/
@GET
@Path("read{absolutePath : .*?}")
//@CacheAge(time = 48, unit = TimeUnit.HOURS)
public Response read(@PathParam("gameModelId") Long gameModelId,
@PathParam("absolutePath") String name,
@Context Request request,
@HeaderParam("Range") String range) {
logger.debug("Asking file (/{})", name);
AbstractContentDescriptor fileDescriptor;
GameModel gameModel = gameModelFacade.find(gameModelId);
jcrFacade.assertPathReadRight(gameModel, name);
// ContentConnector connector = null;
Response.ResponseBuilder response = Response.status(404);
try {
final ContentConnector connector = this.getContentConnector(gameModelId);
fileDescriptor = DescriptorFactory.getDescriptor(name, connector);
if (fileDescriptor instanceof FileDescriptor) {
FileDescriptor fileD = (FileDescriptor) fileDescriptor;
Date lastModified = fileD.getDataLastModified().getTime();
response = request.evaluatePreconditions(lastModified);
if (range != null && !range.isEmpty()) {
// PARTIAL CONTENT !
String[] ranges = range.split("=")[1].split("-");
final long from = Long.parseLong(ranges[0]);
long length = fileD.getLength();
/**
* Chunk media if the range upper bound is unspecified. Chrome sends "bytes=0-"
*/
long to;
if (ranges.length == 2) {
to = Long.parseLong(ranges[1]);
} else {
//to = from + CHUNK_SIZE; // chunk_size was 2MB
to = length - 1;
}
if (to >= length) {
to = length - 1;
}
final int lengthToRead;
if (to - from + 1 > Integer.MAX_VALUE) {
lengthToRead = Integer.MAX_VALUE;
to = from + lengthToRead;
} else {
lengthToRead = (int) (to - from + 1);
}
final String responseRange = String.format("bytes %d-%d/%d", from, to, length);
BufferedInputStream bis = new BufferedInputStream(fileD.getBase64Data(from, lengthToRead), 512);
response = Response.ok(bis).status(206);
response.header("Accept-Ranges", "bytes");
response.header("Content-Range", responseRange);
response.header("Content-Length", lengthToRead);
response.header("Content-Type", fileDescriptor.getMimeType());
response.header("Description", fileDescriptor.getDescription());
} else {
if (response == null) {
response = Response.ok(new BufferedInputStream(fileD.getBase64Data(), 512));
response.header("Content-Type", fileDescriptor.getMimeType());
response.header("Description", fileDescriptor.getDescription());
}
// set a default cacheControl prevent out CacheResponseFilter to set "no-cache, no-store"
response.cacheControl(new CacheControl()).lastModified(fileD.getDataLastModified().getTime());
}
}
} catch (PathNotFoundException e) {
logger.debug("Asked path does not exist: {}", e.getMessage());
return response.build();
} catch (RepositoryException e) {
logger.error("Need to check those errors", e);
return response.build();
}
return response.build();
}
@GET
@Path("meta{absolutePath : .*?}")
@Produces(MediaType.APPLICATION_JSON)
public AbstractContentDescriptor getMeta(@PathParam("gameModelId") Long gameModelId, @PathParam("absolutePath") String name) {
try {
final ContentConnector connector = this.getContentConnector(gameModelId);
return DescriptorFactory.getDescriptor(name, connector);
} catch (PathNotFoundException e) {
logger.debug("Asked path does not exist: {}", e.getMessage());
} catch (RepositoryException e) {
logger.error("Need to check those errors", e);
}
return null;
}
/**
* @param gameModelId
* @param directory
*
* @return list of directory content
*/
@GET
@Path("list{absoluteDirectoryPath : .*?}")
@Produces(MediaType.APPLICATION_JSON)
public List<AbstractContentDescriptor> listDirectory(@PathParam("gameModelId") Long gameModelId, @PathParam("absoluteDirectoryPath") String directory) {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
return jcrFacade.listDirectory(gameModel, ContentConnector.WorkspaceType.FILES, directory);
}
/**
* @param gameModelId
* @param directory
*
* @return list of directory content and its subdirectories recursively
*/
@GET
@Path("recurseList{absoluteDirectoryPath : .*?}")
@Produces(MediaType.APPLICATION_JSON)
public List<AbstractContentDescriptor> recurseListDirectory(@PathParam("gameModelId") Long gameModelId, @PathParam("absoluteDirectoryPath") String directory) {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
return jcrFacade.recurseListDirectory(gameModel, ContentConnector.WorkspaceType.FILES, directory);
}
/**
* @param gameModelId
*
* @return xml repository export
*
* @throws RepositoryException
* @throws IOException
*/
@GET
@Path("exportRawXML")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response exportXML(@PathParam("gameModelId") Long gameModelId) throws RepositoryException, IOException {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
StreamingOutput out = new StreamingOutput() {
@Override
public void write(OutputStream output) throws IOException, WebApplicationException {
try {
final ContentConnector connector = getContentConnector(gameModelId);
try {
connector.exportXML(output);
} finally {
if (!connector.getManaged()) {
connector.rollback();
}
}
} catch (RepositoryException ex) {
logger.error(null, ex);
}
}
};
return Response.ok(out, MediaType.APPLICATION_OCTET_STREAM).header("content-disposition",
"attachment; filename=WEGAS_" + gmFacade.find(gameModelId).getName() + "_files.xml").build();
}
/**
* @param gameModelId
*
* @return gzipped XML repository export
*
* @throws RepositoryException
*/
@GET
@Path("exportXML")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response exportGZ(@PathParam("gameModelId") Long gameModelId) throws RepositoryException {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
StreamingOutput out = new StreamingOutput() {
@Override
public void write(OutputStream output) throws IOException, WebApplicationException {
try {
try (ByteArrayOutputStream xmlStream = new ByteArrayOutputStream()) {
final ContentConnector connector = getContentConnector(gameModelId);
try {
connector.exportXML(xmlStream);
try (GZIPOutputStream o = new GZIPOutputStream(output)) {
o.write(xmlStream.toByteArray());
}
} finally {
if (!connector.getManaged()) {
connector.rollback();
}
}
}
} catch (RepositoryException ex) {
logger.error(null, ex);
}
}
};
return Response.ok(out, MediaType.APPLICATION_OCTET_STREAM).header("content-disposition",
"attachment; filename=WEGAS_" + gmFacade.find(gameModelId).getName() + "_files.xml.gz").build();
}
/**
* @param gameModelId
*
* @return ZIP repository export
*
* @throws RepositoryException
*/
@GET
@Path("exportZIP")
public Response exportZIP(@PathParam("gameModelId") Long gameModelId) throws RepositoryException {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
StreamingOutput out = new StreamingOutput() {
@Override
public void write(OutputStream output) throws IOException, WebApplicationException {
try (ZipOutputStream zipOutputStream = new ZipOutputStream(output)) {
final ContentConnector connector = getContentConnector(gameModelId);
try {
connector.zipDirectory(zipOutputStream, "/");
} finally {
if (!connector.getManaged()) {
connector.rollback();
}
}
} catch (RepositoryException ex) {
logger.error(null, ex);
}
}
};
return Response.ok(out, "application/zip").
header("content-disposition", "attachment; filename=WEGAS_" + gmFacade.find(gameModelId).getName() + "_files.zip").build();
}
/**
* @param gameModelId
* @param file
* @param details
*
* @return imported repository elements
*
* @throws RepositoryException
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
* @throws TransformerException
*/
@POST
@Path("importXML")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
public List<AbstractContentDescriptor> importXML(@PathParam("gameModelId") Long gameModelId,
@FormDataParam("file") InputStream file,
@FormDataParam("file") FormDataBodyPart details)
throws RepositoryException, IOException, SAXException,
ParserConfigurationException, TransformerException {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
try {
final ContentConnector connector = this.getContentConnector(gameModelId);
switch (details.getMediaType().getSubtype()) {
case "x-gzip":
case "gzip":
try (GZIPInputStream in = new GZIPInputStream(file)) {
connector.importXML(in);
}
break;
case "xml":
connector.importXML(file);
break;
default:
throw WegasErrorMessage.error("Uploaded file mimetype does not match requirements [XML or Gunzip], found:"
+ details.getMediaType().toString());
}
} finally {
file.close();
}
return this.listDirectory(gameModelId, "/");
}
/**
* @param gameModelId
* @param absolutePath
* @param force
*
* @return the destroyed element or HTTP not modified
*
* @throws WegasErrorMessage when deleting a non empty directory without force=true
*/
@DELETE
@Path("{force: (force/)?}delete{absolutePath : .*?}")
@Produces(MediaType.APPLICATION_JSON)
public Object delete(@PathParam("gameModelId") Long gameModelId,
@PathParam("absolutePath") String absolutePath,
@PathParam("force") String force) {
GameModel gameModel = gameModelFacade.find(gameModelId);
jcrFacade.assertPathWriteRight(gameModel, absolutePath);
return jcrFacade.delete(gameModel, ContentConnector.WorkspaceType.FILES, absolutePath, force);
}
@POST
@Path("{force: (force/)?}post_delete")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
public Object deleteByPOST(@PathParam("gameModelId") Long gameModelId,
@PathParam("force") String force,
String absolutePath) {
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
return jcrFacade.delete(gameModel, ContentConnector.WorkspaceType.FILES, absolutePath, force);
}
/**
* Update File Meta
*
* @param tmpDescriptor
* @param gameModelId
* @param absolutePath
*
* @return up to date descriptor
*/
@PUT
@Path("update{absolutePath : .*?}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public AbstractContentDescriptor update(AbstractContentDescriptor tmpDescriptor,
@PathParam("gameModelId") Long gameModelId,
@PathParam("absolutePath") String absolutePath) {
AbstractContentDescriptor descriptor;
GameModel gameModel = gameModelFacade.find(gameModelId);
requestManager.assertUpdateRight(gameModel);
try {
final ContentConnector connector = this.getContentConnector(gameModelId);
descriptor = DescriptorFactory.getDescriptor(absolutePath, connector);
descriptor.setNote(tmpDescriptor.getNote());
descriptor.setDescription(tmpDescriptor.getDescription());
if (gameModel.isModel()) {
descriptor.setVisibility(tmpDescriptor.getVisibility());
}
descriptor.saveContentToRepository();
descriptor.loadContentFromRepository(); //Update
return descriptor;
} catch (RepositoryException ex) {
logger.debug("File does not exist", ex);
}
return null;
}
/**
* Well... underlying function not yet implemented do it by hand for now
*
* @param gameModelId
*/
@DELETE
@Path("destruct")
public void deleteWorkspace(@PathParam("gameModelId") Long gameModelId) {
requestManager.checkPermission("GameModel:Delete:gm" + gameModelId);
try {
final ContentConnector fileManager = this.getContentConnector(gameModelId);
fileManager.deleteRoot();
} catch (RepositoryException ex) {
logger.error(null, ex);
}
}
}
|
|
package com.uni.model;
import java.util.Date;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
/**
 * JPA entity for a rental/leave-and-license agreement between a licensor and
 * one or more tenants, optionally brokered.
 *
 * All {@link Date} accessors return/store defensive copies: {@code java.util.Date}
 * is mutable, so handing out the internal reference would let callers silently
 * mutate persistent state (Effective Java, "minimize mutability"). JPA access is
 * field-based (annotations sit on the fields), so the copies do not interfere
 * with persistence.
 */
@Entity(name = "AGREEMENTS")
public class Agreement implements AEntity {

    @Id
    @GeneratedValue
    private Long id;

    // Kind of agreement (persisted by name, not ordinal)
    @Enumerated(EnumType.STRING)
    private AgreementType type;

    @Column(name = "RENT")
    private Double rent;

    @Column(name = "DEPOSIT")
    private Double deposit;

    // Common area maintenance charges
    @Column(name = "CAM")
    private Double cam;

    @Column(name = "DURATION_MONTHS")
    private int duration;

    @Column(name = "NOTICE_DAYS")
    private int noticeDays;

    @Column(name = "INITIAL_LOCKIN")
    private int lockinDays;

    @Column(name = "REGISTRATION_DATE")
    private Date registrationDate;

    @Column(name = "START_DATE")
    private Date startDate;

    @Column(name = "END_DATE")
    private Date endDate;

    @Column(name = "PROPERTY_ADDRESS")
    private String propertyAddress;

    @OneToMany(fetch = FetchType.EAGER)
    private List<Tenant> tenants;

    @OneToOne
    private Licensor licensor;

    @OneToOne
    private Broker broker;

    @Column(name = "BROKERAGE")
    private Double brokerage;

    @Enumerated(EnumType.STRING)
    private AgreementStatus status;

    /** Null-safe defensive copy of a mutable {@link Date}. */
    private static Date copyOf(Date date) {
        return date == null ? null : new Date(date.getTime());
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public AgreementType getType() {
        return type;
    }

    public void setType(AgreementType type) {
        this.type = type;
    }

    public Double getRent() {
        return rent;
    }

    public void setRent(Double rent) {
        this.rent = rent;
    }

    public Double getDeposit() {
        return deposit;
    }

    public void setDeposit(Double deposit) {
        this.deposit = deposit;
    }

    public Double getCam() {
        return cam;
    }

    public void setCam(Double cam) {
        this.cam = cam;
    }

    /** @return agreement duration in months */
    public int getDuration() {
        return duration;
    }

    public void setDuration(int duration) {
        this.duration = duration;
    }

    public int getNoticeDays() {
        return noticeDays;
    }

    public void setNoticeDays(int noticeDays) {
        this.noticeDays = noticeDays;
    }

    public int getLockinDays() {
        return lockinDays;
    }

    public void setLockinDays(int lockinDays) {
        this.lockinDays = lockinDays;
    }

    public Date getRegistrationDate() {
        return copyOf(registrationDate);
    }

    public void setRegistrationDate(Date registrationDate) {
        this.registrationDate = copyOf(registrationDate);
    }

    public Date getStartDate() {
        return copyOf(startDate);
    }

    public void setStartDate(Date startDate) {
        this.startDate = copyOf(startDate);
    }

    public Date getEndDate() {
        return copyOf(endDate);
    }

    public void setEndDate(Date endDate) {
        this.endDate = copyOf(endDate);
    }

    public String getPropertyAddress() {
        return propertyAddress;
    }

    public void setPropertyAddress(String propertyAddress) {
        this.propertyAddress = propertyAddress;
    }

    public List<Tenant> getTenants() {
        return tenants;
    }

    public void setTenants(List<Tenant> tenants) {
        this.tenants = tenants;
    }

    public Licensor getLicensor() {
        return licensor;
    }

    public void setLicensor(Licensor licensor) {
        this.licensor = licensor;
    }

    public Broker getBroker() {
        return broker;
    }

    public void setBroker(Broker broker) {
        this.broker = broker;
    }

    public Double getBrokerage() {
        return brokerage;
    }

    public void setBrokerage(Double brokerage) {
        this.brokerage = brokerage;
    }

    public AgreementStatus getStatus() {
        return status;
    }

    public void setStatus(AgreementStatus status) {
        this.status = status;
    }
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: amrk
* Date: Jul 3, 2005
* Time: 6:15:22 PM
*/
package com.theoryinpractice.testng.configuration;
import com.intellij.codeInsight.completion.CompletionResultSet;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.JavaExecutionUtil;
import com.intellij.execution.configuration.BrowseModuleValueActionListener;
import com.intellij.execution.testframework.TestSearchScope;
import com.intellij.execution.ui.AlternativeJREPanel;
import com.intellij.execution.ui.CommonJavaParametersPanel;
import com.intellij.execution.ui.ConfigurationModuleSelector;
import com.intellij.icons.AllIcons;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.fileTypes.PlainTextLanguage;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.JavaCodeFragment;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.*;
import com.intellij.ui.components.JBList;
import com.intellij.ui.table.TableView;
import com.intellij.util.IconUtil;
import com.intellij.util.TextFieldCompletionProvider;
import com.theoryinpractice.testng.MessageInfoException;
import com.theoryinpractice.testng.configuration.browser.*;
import com.theoryinpractice.testng.model.*;
import com.theoryinpractice.testng.util.TestNGUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.text.Document;
import javax.swing.text.PlainDocument;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Map;
public class TestNGConfigurationEditor extends SettingsEditor<TestNGConfiguration> implements PanelWithAnchor {
//private static final Logger LOGGER = Logger.getInstance("TestNG Runner");
private final Project project;
private JPanel panel;
private LabeledComponent<EditorTextFieldWithBrowseButton> classField;
private LabeledComponent<JComboBox> moduleClasspath;
private AlternativeJREPanel alternateJDK;
private final ConfigurationModuleSelector moduleSelector;
private JRadioButton suiteTest;
private JRadioButton packageTest;
private JRadioButton classTest;
private JRadioButton methodTest;
private JRadioButton groupTest;
private JRadioButton patternTest;
private final TestNGConfigurationModel model;
private LabeledComponent<EditorTextFieldWithBrowseButton> methodField;
private LabeledComponent<EditorTextFieldWithBrowseButton> packageField;
private LabeledComponent<TextFieldWithBrowseButton.NoPathCompletion> groupField;
private LabeledComponent<TextFieldWithBrowseButton> suiteField;
private JComponent anchor;
private JRadioButton packagesInProject;
private JRadioButton packagesInModule;
private JRadioButton packagesAcrossModules;
private JPanel packagePanel;
private TestNGParametersTableModel propertiesTableModel;
private LabeledComponent<TextFieldWithBrowseButton> propertiesFile;
private LabeledComponent<TextFieldWithBrowseButton> outputDirectory;
private TableView propertiesTableView;
private JPanel commonParametersPanel;//temp compilation problems
private JList myListenersList;
private JCheckBox myUseDefaultReportersCheckBox;
private LabeledComponent<JPanel> myPattern;
private JPanel myPropertiesPanel;
private JPanel myListenersPanel;
TextFieldWithBrowseButton myPatternTextField;
private final CommonJavaParametersPanel commonJavaParameters = new CommonJavaParametersPanel();
private ArrayList<Map.Entry> propertiesList;
private TestNGListenersTableModel listenerModel;
private TestNGConfiguration config;
/**
 * Builds the TestNG run-configuration editor UI: wires the radio-button test-type
 * chooser, the per-type input fields, their browse buttons, and the shared
 * parameter/classpath panels. Ordering matters here: the model must exist before
 * createView(), and documents must be attached before browse listeners.
 */
public TestNGConfigurationEditor(Project project) {
  this.project = project;
  // One browser per input field; index order MUST match the components[] array
  // below (package, class, method, group, suite, pattern).
  BrowseModuleValueActionListener[] browseListeners = new BrowseModuleValueActionListener[]{new PackageBrowser(project),
    new TestClassBrowser(project, this), new MethodBrowser(project, this), new GroupBrowser(project, this), new SuiteBrowser(project),
    new TestClassBrowser(project, this) {
      // Pattern browser: appends the chosen class to the "||"-separated pattern list
      // instead of replacing the field content.
      @Override
      protected void onClassChoosen(PsiClass psiClass) {
        final JTextField textField = myPatternTextField.getTextField();
        final String text = textField.getText();
        textField.setText(text + (text.length() > 0 ? "||" : "") + psiClass.getQualifiedName());
      }
      @Override
      public void actionPerformed(ActionEvent e) {
        showDialog();
      }
    }};
  model = new TestNGConfigurationModel(project);
  model.setListener(this);
  createView();
  moduleSelector = new ConfigurationModuleSelector(project, getModulesComponent());
  commonJavaParameters.setModuleContext(moduleSelector.getModule());
  // Keep the parameters panel's module context in sync with the classpath combo.
  moduleClasspath.getComponent().addActionListener(new ActionListener() {
    public void actionPerformed(ActionEvent e) {
      commonJavaParameters.setModuleContext(moduleSelector.getModule());
    }
  });
  // The "pattern" field is assembled by hand: text field + add button + edit button.
  final JPanel panel = myPattern.getComponent();
  panel.setLayout(new BorderLayout());
  myPatternTextField = new TextFieldWithBrowseButton();
  myPatternTextField.setButtonIcon(IconUtil.getAddIcon());
  panel.add(myPatternTextField, BorderLayout.CENTER);
  final FixedSizeButton editBtn = new FixedSizeButton();
  editBtn.setIcon(AllIcons.Actions.ShowViewer);
  editBtn.addActionListener(new ActionListener() {
    public void actionPerformed(ActionEvent e) {
      Messages.showTextAreaDialog(myPatternTextField.getTextField(), "Configure suite tests", "EditParametersPopupWindow");
    }
  });
  panel.add(editBtn, BorderLayout.EAST);
  // Selecting a test-type radio button switches the model type and re-shows the
  // matching input field (see redisplay()).
  registerListener(new JRadioButton[]{packageTest, classTest, methodTest, groupTest, suiteTest, patternTest}, new ChangeListener() {
    public void stateChanged(ChangeEvent e) {
      ButtonModel buttonModel = (ButtonModel)e.getSource();
      if (buttonModel.isSelected()) {
        if (buttonModel == packageTest.getModel()) {
          model.setType(TestType.PACKAGE);
        }
        else if (buttonModel == classTest.getModel()) {
          model.setType(TestType.CLASS);
        }
        else if (buttonModel == methodTest.getModel()) {
          model.setType(TestType.METHOD);
        }
        else if (buttonModel == groupTest.getModel()) {
          model.setType(TestType.GROUP);
        }
        else if (buttonModel == suiteTest.getModel()) {
          model.setType(TestType.SUITE);
        }
        else if (buttonModel == patternTest.getModel()) {
          model.setType(TestType.PATTERN);
        }
        redisplay();
      }
    }
  });
  registerListener(new JRadioButton[]{packagesInProject, packagesInModule, packagesAcrossModules}, null);
  // "Whole project" scope disables the module classpath chooser.
  packagesInProject.addChangeListener(new ChangeListener() {
    public void stateChanged(ChangeEvent e) {
      evaluateModuleClassPath();
    }
  });
  // Bind each input field to its model document and its browse listener.
  // Index i ties together components[i], model.getDocument(i) and browseListeners[i].
  LabeledComponent[] components = new LabeledComponent[]{packageField, classField, methodField, groupField, suiteField, myPattern};
  for (int i = 0; i < components.length; i++) {
    JComponent field = components[i].getComponent();
    Object document = model.getDocument(i);
    if (field instanceof TextFieldWithBrowseButton) {
      ((TextFieldWithBrowseButton)field).getTextField().setDocument((PlainDocument)document);
    }
    else if (field instanceof EditorTextFieldWithBrowseButton) {
      final com.intellij.openapi.editor.Document componentDocument =
        ((EditorTextFieldWithBrowseButton)field).getChildComponent().getDocument();
      model.setDocument(i, componentDocument);
    }
    else {
      // myPattern's component is a plain JPanel: redirect to the hand-built
      // pattern text field with a fresh document.
      field = myPatternTextField;
      document = new PlainDocument();
      ((TextFieldWithBrowseButton)field).getTextField().setDocument((Document)document);
      model.setDocument(i, document);
    }
    browseListeners[i].setField((ComponentWithBrowseButton)field);
  }
  model.setType(TestType.CLASS);
  propertiesFile.getComponent().getTextField().setDocument(model.getPropertiesFileDocument());
  outputDirectory.getComponent().getTextField().setDocument(model.getOutputDirectoryDocument());
  commonJavaParameters.setProgramParametersLabel(ExecutionBundle.message("junit.configuration.test.runner.parameters.label"));
  setAnchor(outputDirectory.getLabel());
  alternateJDK.setAnchor(moduleClasspath.getLabel());
  commonJavaParameters.setAnchor(moduleClasspath.getLabel());
}
/**
 * Toggles availability of the module classpath chooser: when the
 * "packages in whole project" scope is active the module choice is
 * irrelevant, so the chooser is disabled.
 */
private void evaluateModuleClassPath() {
  final boolean wholeProjectScope = packagesInProject.isSelected();
  moduleClasspath.setEnabled(!wholeProjectScope);
}
/**
 * Shows only the input components relevant to the currently selected test
 * kind (package, class, method, group, suite or pattern) and hides the rest.
 * If no radio button is selected, component visibility is left untouched,
 * matching the previous behavior.
 */
private void redisplay() {
  if (packageTest.isSelected()) {
    showOnly(packagePanel);
  }
  else if (classTest.isSelected()) {
    showOnly(classField);
  }
  else if (methodTest.isSelected()) {
    // A method is always qualified by its class, so both fields are shown.
    showOnly(classField, methodField);
  }
  else if (groupTest.isSelected()) {
    showOnly(groupField);
  }
  else if (suiteTest.isSelected()) {
    showOnly(suiteField);
  }
  else if (patternTest.isSelected()) {
    showOnly(myPattern);
  }
}

/**
 * Makes exactly the given components visible and hides every other
 * test-kind specific input component.
 */
private void showOnly(JComponent... visibleComponents) {
  final JComponent[] allComponents = {packagePanel, classField, methodField, groupField, suiteField, myPattern};
  for (JComponent component : allComponents) {
    boolean visible = false;
    for (JComponent candidate : visibleComponents) {
      if (candidate == component) {
        visible = true;
        break;
      }
    }
    component.setVisible(visible);
  }
}
/**
 * @return the text currently entered in the test class field
 */
public String getClassName() {
return classField.getComponent().getText();
}
/**
 * @return the combo box used to pick the module classpath for the run configuration
 */
public JComboBox getModulesComponent() {
return moduleClasspath.getComponent();
}
/**
 * Populates the editor UI from the given run configuration: delegates to the
 * model and common-parameter panels, restores the package search scope radio
 * buttons, the alternative JRE settings, and the properties/listeners tables.
 */
@Override
protected void resetEditorFrom(TestNGConfiguration config) {
  this.config = config;

  model.reset(config);
  commonJavaParameters.reset(config);
  getModuleSelector().reset(config);

  final TestData persistantData = config.getPersistantData();
  final TestSearchScope searchScope = persistantData.getScope();
  // Pick the radio button matching the stored scope; whole-project is the fallback.
  final JRadioButton scopeButton;
  if (searchScope == TestSearchScope.SINGLE_MODULE) {
    scopeButton = packagesInModule;
  }
  else if (searchScope == TestSearchScope.MODULE_WITH_DEPENDENCIES) {
    scopeButton = packagesAcrossModules;
  }
  else {
    scopeButton = packagesInProject;
  }
  scopeButton.setSelected(true);

  alternateJDK.init(config.ALTERNATIVE_JRE_PATH, config.ALTERNATIVE_JRE_PATH_ENABLED);

  // Rebuild the mutable properties list backing the table model.
  propertiesList = new ArrayList<Map.Entry>();
  propertiesList.addAll(persistantData.TEST_PROPERTIES.entrySet());
  propertiesTableModel.setParameterList(propertiesList);

  listenerModel.setListenerList(persistantData.TEST_LISTENERS);
  myUseDefaultReportersCheckBox.setSelected(persistantData.USE_DEFAULT_REPORTERS);
}
/**
 * Writes the current editor state back into the given run configuration:
 * test kind and target via the model, package search scope, JRE override,
 * and the properties/listeners tables.
 */
@Override
public void applyEditorTo(TestNGConfiguration config) {
  model.apply(getModuleSelector().getModule(), config);
  getModuleSelector().applyTo(config);

  final TestData persistantData = config.getPersistantData();
  // The fine-grained scope radios only apply to package-based runs; all other
  // kinds always run with module-and-dependencies scope. If package mode is
  // active but no radio is selected, the stored scope is left unchanged.
  if (!packageTest.isSelected()) {
    persistantData.setScope(TestSearchScope.MODULE_WITH_DEPENDENCIES);
  }
  else if (packagesInProject.isSelected()) {
    persistantData.setScope(TestSearchScope.WHOLE_PROJECT);
  }
  else if (packagesInModule.isSelected()) {
    persistantData.setScope(TestSearchScope.SINGLE_MODULE);
  }
  else if (packagesAcrossModules.isSelected()) {
    persistantData.setScope(TestSearchScope.MODULE_WITH_DEPENDENCIES);
  }

  commonJavaParameters.applyTo(config);
  config.ALTERNATIVE_JRE_PATH = alternateJDK.getPath();
  config.ALTERNATIVE_JRE_PATH_ENABLED = alternateJDK.isPathEnabled();

  persistantData.TEST_PROPERTIES.clear();
  for (Map.Entry<String, String> property : propertiesList) {
    persistantData.TEST_PROPERTIES.put(property.getKey(), property.getValue());
  }

  persistantData.TEST_LISTENERS.clear();
  persistantData.TEST_LISTENERS.addAll(listenerModel.getListenerList());

  persistantData.USE_DEFAULT_REPORTERS = myUseDefaultReportersCheckBox.isSelected();
}
/**
 * @return the selector that resolves which module this run configuration uses
 */
public ConfigurationModuleSelector getModuleSelector() {
return moduleSelector;
}
@NotNull
@Override
protected JComponent createEditor() {
// "panel" is the editor's root component (presumably bound by the GUI
// designer form — TODO confirm against the .form file).
return panel;
}
/**
 * @return the component whose label column the other labeled rows align to
 */
@Override
public JComponent getAnchor() {
return anchor;
}
/**
 * Stores the shared anchor and propagates it to every labeled input row so
 * their labels line up in a single column.
 */
@Override
public void setAnchor(JComponent anchor) {
  this.anchor = anchor;
  final LabeledComponent[] anchoredRows =
    {methodField, packageField, groupField, suiteField, outputDirectory, classField, myPattern};
  for (LabeledComponent row : anchoredRows) {
    row.setAnchor(anchor);
  }
}
/**
 * Puts the given radio buttons into one {@link ButtonGroup}, attaches the
 * same change listener to every button's model (a {@code null} listener is
 * tolerated by Swing and simply not registered), and ensures the group has a
 * selection by defaulting to the first button.
 */
private static void registerListener(JRadioButton[] buttons, ChangeListener changelistener) {
  final ButtonGroup group = new ButtonGroup();
  for (int i = 0; i < buttons.length; i++) {
    final JRadioButton radioButton = buttons[i];
    radioButton.getModel().addChangeListener(changelistener);
    group.add(radioButton);
  }
  if (group.getSelection() == null) {
    group.setSelected(buttons[0].getModel(), true);
  }
}
/**
 * Builds and wires the static part of the editor UI: the test-kind radio
 * buttons, the per-kind input fields with their browse/completion helpers,
 * the output directory and properties-file pickers, and the properties and
 * listeners tables with their toolbar actions. Invoked once from the
 * constructor; statement order matters because later wiring reads
 * components created earlier.
 */
private void createView() {
commonParametersPanel.add(commonJavaParameters, BorderLayout.CENTER);
// Start with no test kind selected; the model drives the initial selection
// later via onTypeChanged().
packageTest.setSelected(false);
suiteTest.setSelected(false);
suiteTest.setEnabled(true);
groupTest.setSelected(false);
groupTest.setEnabled(true);
classTest.setSelected(false);
classTest.setEnabled(true);
patternTest.setSelected(false);
patternTest.setEnabled(true);
// Class field: editor-backed text field whose completion only shows classes
// accepted by the TestNG class filter.
classField.setComponent(new EditorTextFieldWithBrowseButton(project, true, new JavaCodeFragment.VisibilityChecker() {
@Override
public Visibility isDeclarationVisible(PsiElement declaration, PsiElement place) {
try {
if (declaration instanceof PsiClass &&
new TestClassBrowser(project, TestNGConfigurationEditor.this).getFilter().isAccepted((PsiClass)declaration)) {
return Visibility.VISIBLE;
}
}
catch (MessageInfoException e) {
// Filter construction failed; hide the declaration rather than propagate.
return Visibility.NOT_VISIBLE;
}
return Visibility.NOT_VISIBLE;
}
}));
// Method field: plain-text editor field completed with the @Test methods of
// the class currently entered in the class field.
final EditorTextFieldWithBrowseButton methodEditorTextField = new EditorTextFieldWithBrowseButton(project, true,
JavaCodeFragment.VisibilityChecker.EVERYTHING_VISIBLE,
PlainTextLanguage.INSTANCE.getAssociatedFileType());
new TextFieldCompletionProvider() {
@Override
protected void addCompletionVariants(@NotNull String text, int offset, @NotNull String prefix, @NotNull CompletionResultSet result) {
final String className = getClassName();
if (className.trim().length() == 0) {
return;
}
final PsiClass testClass = getModuleSelector().findClass(className);
if (testClass == null) return;
for (PsiMethod psiMethod : testClass.getAllMethods()) {
// Only offer methods TestNG would actually treat as tests.
if (TestNGUtil.hasTest(psiMethod)) {
result.addElement(LookupElementBuilder.create(psiMethod.getName()));
}
}
}
}.apply(methodEditorTextField.getChildComponent());
methodField.setComponent(methodEditorTextField);
groupField.setComponent(new TextFieldWithBrowseButton.NoPathCompletion());
suiteField.setComponent(new TextFieldWithBrowseButton());
packageField.setVisible(true);
packageField.setEnabled(true);
packageField.setComponent(new EditorTextFieldWithBrowseButton(project, false));
// Output directory chooser: single-folder file dialog.
TextFieldWithBrowseButton outputDirectoryButton = new TextFieldWithBrowseButton();
outputDirectory.setComponent(outputDirectoryButton);
outputDirectoryButton.addBrowseFolderListener("TestNG", "Select test output directory", project,
FileChooserDescriptorFactory.createSingleFolderDescriptor());
moduleClasspath.setEnabled(true);
moduleClasspath.setComponent(new JComboBox());
propertiesTableModel = new TestNGParametersTableModel();
listenerModel = new TestNGListenersTableModel();
// Properties file chooser: only *.properties files (and directories) are
// shown; hidden dot-files are filtered out unless requested.
TextFieldWithBrowseButton textFieldWithBrowseButton = new TextFieldWithBrowseButton();
propertiesFile.setComponent(textFieldWithBrowseButton);
FileChooserDescriptor propertiesFileDescriptor = new FileChooserDescriptor(true, false, false, false, false, false) {
@Override
public boolean isFileVisible(VirtualFile virtualFile, boolean showHidden) {
if (!showHidden && virtualFile.getName().charAt(0) == '.') return false;
return virtualFile.isDirectory() || "properties".equals(virtualFile.getExtension());
}
};
textFieldWithBrowseButton
.addBrowseFolderListener("TestNG", "Select .properties file for test properties", project, propertiesFileDescriptor);
// Properties table: add selects the freshly appended row; remove deletes all
// selected rows then restores a nearby selection.
propertiesTableView = new TableView();
propertiesTableView.setModelAndUpdateColumns(propertiesTableModel);
propertiesTableView.setShowGrid(true);
myPropertiesPanel.add(
ToolbarDecorator.createDecorator(propertiesTableView)
.setAddAction(new AnActionButtonRunnable() {
@Override
public void run(AnActionButton button) {
propertiesTableModel.addParameter();
int index = propertiesTableModel.getRowCount() - 1;
propertiesTableView.setRowSelectionInterval(index, index);
}
}).setRemoveAction(new AnActionButtonRunnable() {
@Override
public void run(AnActionButton button) {
int idx = propertiesTableView.getSelectedRow() - 1;
for (int row : propertiesTableView.getSelectedRows()) {
propertiesTableModel.removeProperty(row);
}
if (idx > -1) propertiesTableView.setRowSelectionInterval(idx, idx);
}
}).disableUpDownActions().createPanel(), BorderLayout.CENTER);
// Listeners list: the add action opens a class chooser (see
// AddActionButtonRunnable) and is disabled for the default project.
myListenersList = new JBList(listenerModel);
myListenersPanel.add(
ToolbarDecorator.createDecorator(myListenersList).setAddAction(new AddActionButtonRunnable())
.setRemoveAction(new AnActionButtonRunnable() {
@Override
public void run(AnActionButton button) {
int idx = myListenersList.getSelectedIndex() - 1;
for (int row : myListenersList.getSelectedIndices()) {
listenerModel.removeListener(row);
}
if (idx > -1) myListenersList.setSelectedIndex(idx);
}
}).setAddActionUpdater(new AnActionButtonUpdater() {
@Override
public boolean isEnabled(AnActionEvent e) {
return !project.isDefault();
}
}).disableUpDownActions().createPanel(), BorderLayout.CENTER);
}
/**
 * Synchronizes the UI with the given test kind: selects the matching radio
 * button, enables only the input fields that apply to that kind, and updates
 * availability of the module classpath chooser. Unknown kinds leave the
 * radio buttons and field states untouched.
 *
 * @param type the test kind the configuration model switched to
 */
public void onTypeChanged(TestType type) {
  // Package- and suite-based runs may span the whole project, in which case
  // the module classpath combo is meaningless; re-evaluate its availability.
  if (type != TestType.PACKAGE && type != TestType.SUITE) {
    moduleClasspath.setEnabled(true);
  }
  else {
    evaluateModuleClassPath();
  }
  if (type == TestType.PACKAGE) {
    packageTest.setSelected(true);
    enableOnly(packageField);
  }
  else if (type == TestType.CLASS) {
    classTest.setSelected(true);
    enableOnly(classField);
  }
  else if (type == TestType.METHOD) {
    methodTest.setSelected(true);
    // A method is always qualified by its class, so both fields stay active.
    enableOnly(classField, methodField);
  }
  else if (type == TestType.GROUP) {
    groupTest.setSelected(true);
    enableOnly(groupField);
  }
  else if (type == TestType.SUITE) {
    suiteTest.setSelected(true);
    enableOnly(suiteField);
  }
  else if (type == TestType.PATTERN) {
    patternTest.setSelected(true);
    enableOnly(myPattern);
  }
}

/**
 * Enables exactly the given input fields and disables every other
 * test-kind specific field.
 */
private void enableOnly(JComponent... enabledFields) {
  final JComponent[] allFields = {packageField, classField, methodField, groupField, suiteField, myPattern};
  for (JComponent field : allFields) {
    boolean enabled = false;
    for (JComponent enabledField : enabledFields) {
      if (enabledField == field) {
        enabled = true;
        break;
      }
    }
    field.setEnabled(enabled);
  }
}
/**
 * Toolbar "add" action for the listeners list: lets the user pick a TestNG
 * listener class via a tree class chooser and appends its runtime-qualified
 * name to the listener model.
 */
private class AddActionButtonRunnable implements AnActionButtonRunnable {
  private final Logger LOGGER = Logger.getInstance("TestNG Runner");

  /**
   * Builds a search scope covering all given modules, including their
   * dependencies and libraries.
   *
   * @param modules the configuration's modules; may be {@code null} or empty
   * @return the united scope, or {@code null} when no modules are available
   */
  @Nullable
  protected GlobalSearchScope getSearchScope(Module[] modules) {
    if (modules == null || modules.length == 0) return null;
    GlobalSearchScope scope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(modules[0]);
    for (int i = 1; i < modules.length; i++) {
      // uniteWith() returns a NEW scope; the original code discarded the
      // result, so only the first module's scope was ever used.
      scope = scope.uniteWith(GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(modules[i]));
    }
    return scope;
  }

  /**
   * Opens a class chooser restricted to valid TestNG listener classes.
   *
   * @return the runtime-qualified name of the chosen class, or {@code null}
   *         when the dialog was cancelled
   */
  @Nullable
  protected String selectListenerClass() {
    GlobalSearchScope searchScope = getSearchScope(config.getModules());
    if (searchScope == null) {
      // No modules configured yet — fall back to the whole project.
      searchScope = GlobalSearchScope.allScope(project);
    }
    final TestListenerFilter filter = new TestListenerFilter(searchScope, project);

    TreeClassChooser chooser = TreeClassChooserFactory.getInstance(project)
      .createWithInnerClassesScopeChooser("Choose Listener Class", filter.getScope(), filter, null);
    chooser.showDialog();
    PsiClass psiclass = chooser.getSelected();
    if (psiclass == null) {
      return null;
    }
    else {
      return JavaExecutionUtil.getRuntimeQualifiedName(psiclass);
    }
  }

  @Override
  public void run(AnActionButton button) {
    final String className = selectListenerClass();
    if (className != null) {
      listenerModel.addListener(className);
      LOGGER.info("Adding listener " + className + " to configuration.");
    }
  }
}
}
|
|
package dto.Static;
import java.io.Serializable;
/*
* Copyright 2014 Taylor Caldwell
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Data-transfer object holding localized UI strings returned by the Riot
 * static-data "language-strings" endpoint (presumably deserialized by field
 * name — TODO confirm which mapper is used). Field names intentionally match
 * the JSON keys, which is why they do not follow Java naming conventions and
 * MUST NOT be renamed. All values are plain localized strings; the class is
 * read-only (getters only, no setters).
 */
public class LanguageStringsData implements Serializable {
private static final long serialVersionUID = 7114446344596103892L;
// One field per JSON key; names are serialization-mapped — do not rename.
private String RecommendedItems, masteryUtility, FlatSpellBlockMod, categoryChampion, colloq_Defense, modeCof, Fighter, ButtonBuy, rFlatTimeDeadModPerLevel,
Boots, categorySummoner, colloq_Slow, colloq_SpellVamp, native_bg, Gold, FlatEXPBonus, recommended_essential, mobileCompanion, FlatEnergyPoolMod,
colloq_Consumables, recommended_starting, modeOneforall, recommended_defensive, colloq_Mana, rFlatEnergyRegenModPerLevel, ArmorPenetration,
rFlatMPRegenModPerLevel, native_ru, colloq_ManaRegen, PercentSpellVampMod, CriticalStrike, native_ro, recommended_offensive,
rPercentArmorPenetrationModPerLevel, PercentArmorMod, PercentBlockMod, colloq_Tenacity, CD_, colloq_CooldownReduction, ButtonSell, Attack,
PercentMovementSpeedMod, rFlatEnergyModPerLevel, PercentMPPoolMod, Damage, Range, PercentCritChanceMod, PercentCritDamageMod, masteryDefense,
rFlatMagicPenetrationMod, MagicPenetration, OriginalCost_, native_pt, rPercentCooldownMod, Language, native_pl, GoldPer, recommended_consumables,
NextRank_, rFlatSpellBlockModPerLevel, colloq_Movement, native_fr, colloq_Armor, colloq_Consumable, SpellDamage, Tank, Mage, native_en,
FlatBlockMod, PercentEXPBonus, statAttack, native_es, statAbility, colloq_SpellBlock, rFlatGoldPer10Mod, FlatAttackSpeedMod, mobileNews, Lore,
FlatHPPoolMod, native_zh_TW, PercentAttackSpeedMod, rFlatDodgeMod, native_vn, Map12, FlatCritDamageMod, rFlatMovementSpeedModPerLevel, Map10,
rFlatMPModPerLevel, native_el, colloq_MagicPenetration, Marksman, Builds_, Continue, Health, FlatHPRegenMod, rFlatTimeDeadMod, native_th,
colloq_Active, mobileForum, colloq_HealthRegen, colloq_Damage, colloq_Aura, Assassin, native_de, spells_Self, PrimaryRole, Rank_,
SpecialRecipeLarge, Cost_, CooldownReduction, rPercentAttackSpeedModPerLevel, native_cs, native_ar, colloq_AttackSpeed, SellsFor_,
rPercentArmorPenetrationMod, rFlatMagicPenetrationModPerLevel, native_tr, rPercentCooldownModPerLevel, categoryItem, Consumable, Stats,
colloq_LifeSteal, Tips, statDefense, PercentMPRegenMod, masteryOffense, SpellBlock, PercentPhysicalDamageMod, FlatCritChanceMod,
PercentSpellBlockMod, native_id, native_ja, rFlatHPModPerLevel, Defense, colloq_SpellDamage, rPercentMovementSpeedModPerLevel, native_it,
colloq_Health, Slow, Active, Require_, PercentMagicDamageMod, Map1, PlayingAs, FlatMPPoolMod, FlatPhysicalDamageMod, rFlatPhysicalDamageModPerLevel,
ManaRegen, Range_, categoryRune, Map8, Mana, rFlatCritChanceModPerLevel, rPercentMagicPenetrationModPerLevel, LifeSteal, spells_target_100,
FlatArmorMod, rPercentMagicPenetrationMod, Tenacity, Details_, PercentDodgeMod, statUtility, Magic, modeTutorial, SpecialRecipeSmall,
colloq_CriticalStrike, native_zh_CN, FlatMovementSpeedMod, modeFirstblood, native_hu, colloq_ArmorPenetration, HealthRegen, rFlatArmorModPerLevel,
colloq_NonbootsMovement, UpgradeCost_, Back, spells_target_6, rFlatMagicDamageModPerLevel, spells_target_7, spells_target_8, spells_target_2,
spells_target_3, spells_target_4, spells_target_5, native_zh_MY, spells_target_0, PlayingAgainst, spells_target_1, FlatMPRegenMod, colloq_GoldPer,
native_nl, rFlatHPRegenModPerLevel, colloq_Attack, Movement, rPercentTimeDeadMod, FlatEnergyRegenMod, modeAram, PercentHPRegenMod, colloq_Magic,
rFlatArmorPenetrationMod, mobileFriends, AllItems, Abilities, ChampionInfo, native_ko, rFlatCritDamageModPerLevel, mode6v6, Aura, mobilePleaseWait,
rFlatDodgeModPerLevel, statDifficulty, modeOdin, Support, PercentLifeStealMod, rPercentTimeDeadModPerLevel, AttackSpeed, native_zh, SpellVamp,
ItemInfo, PercentHPPoolMod, NonbootsMovement, colloq_Boots, rFlatArmorPenetrationModPerLevel, Armor, FlatMagicDamageMod, modeClassic, Level,
categoryMastery;
// Trivial accessors below: each returns the raw localized string for the
// corresponding field (null when absent in the API response).
public String getRecommendedItems() {
return RecommendedItems;
}
public String getMasteryUtility() {
return masteryUtility;
}
public String getFlatSpellBlockMod() {
return FlatSpellBlockMod;
}
public String getCategoryChampion() {
return categoryChampion;
}
public String getColloq_Defense() {
return colloq_Defense;
}
public String getModeCof() {
return modeCof;
}
public String getFighter() {
return Fighter;
}
public String getButtonBuy() {
return ButtonBuy;
}
public String getrFlatTimeDeadModPerLevel() {
return rFlatTimeDeadModPerLevel;
}
public String getBoots() {
return Boots;
}
public String getCategorySummoner() {
return categorySummoner;
}
public String getColloq_Slow() {
return colloq_Slow;
}
public String getColloq_SpellVamp() {
return colloq_SpellVamp;
}
public String getNative_bg() {
return native_bg;
}
public String getGold() {
return Gold;
}
public String getFlatEXPBonus() {
return FlatEXPBonus;
}
public String getRecommended_essential() {
return recommended_essential;
}
public String getMobileCompanion() {
return mobileCompanion;
}
public String getFlatEnergyPoolMod() {
return FlatEnergyPoolMod;
}
public String getColloq_Consumables() {
return colloq_Consumables;
}
public String getRecommended_starting() {
return recommended_starting;
}
public String getModeOneforall() {
return modeOneforall;
}
public String getRecommended_defensive() {
return recommended_defensive;
}
public String getColloq_Mana() {
return colloq_Mana;
}
public String getrFlatEnergyRegenModPerLevel() {
return rFlatEnergyRegenModPerLevel;
}
public String getArmorPenetration() {
return ArmorPenetration;
}
public String getrFlatMPRegenModPerLevel() {
return rFlatMPRegenModPerLevel;
}
public String getNative_ru() {
return native_ru;
}
public String getColloq_ManaRegen() {
return colloq_ManaRegen;
}
public String getPercentSpellVampMod() {
return PercentSpellVampMod;
}
public String getCriticalStrike() {
return CriticalStrike;
}
public String getNative_ro() {
return native_ro;
}
public String getRecommended_offensive() {
return recommended_offensive;
}
public String getrPercentArmorPenetrationModPerLevel() {
return rPercentArmorPenetrationModPerLevel;
}
public String getPercentArmorMod() {
return PercentArmorMod;
}
public String getPercentBlockMod() {
return PercentBlockMod;
}
public String getColloq_Tenacity() {
return colloq_Tenacity;
}
public String getCD_() {
return CD_;
}
public String getColloq_CooldownReduction() {
return colloq_CooldownReduction;
}
public String getButtonSell() {
return ButtonSell;
}
public String getAttack() {
return Attack;
}
public String getPercentMovementSpeedMod() {
return PercentMovementSpeedMod;
}
public String getrFlatEnergyModPerLevel() {
return rFlatEnergyModPerLevel;
}
public String getPercentMPPoolMod() {
return PercentMPPoolMod;
}
public String getDamage() {
return Damage;
}
public String getRange() {
return Range;
}
public String getPercentCritChanceMod() {
return PercentCritChanceMod;
}
public String getPercentCritDamageMod() {
return PercentCritDamageMod;
}
public String getMasteryDefense() {
return masteryDefense;
}
public String getrFlatMagicPenetrationMod() {
return rFlatMagicPenetrationMod;
}
public String getMagicPenetration() {
return MagicPenetration;
}
public String getOriginalCost_() {
return OriginalCost_;
}
public String getNative_pt() {
return native_pt;
}
public String getrPercentCooldownMod() {
return rPercentCooldownMod;
}
public String getLanguage() {
return Language;
}
public String getNative_pl() {
return native_pl;
}
public String getGoldPer() {
return GoldPer;
}
public String getRecommended_consumables() {
return recommended_consumables;
}
public String getNextRank_() {
return NextRank_;
}
public String getrFlatSpellBlockModPerLevel() {
return rFlatSpellBlockModPerLevel;
}
public String getColloq_Movement() {
return colloq_Movement;
}
public String getNative_fr() {
return native_fr;
}
public String getColloq_Armor() {
return colloq_Armor;
}
public String getColloq_Consumable() {
return colloq_Consumable;
}
public String getSpellDamage() {
return SpellDamage;
}
public String getTank() {
return Tank;
}
public String getMage() {
return Mage;
}
public String getNative_en() {
return native_en;
}
public String getFlatBlockMod() {
return FlatBlockMod;
}
public String getPercentEXPBonus() {
return PercentEXPBonus;
}
public String getStatAttack() {
return statAttack;
}
public String getNative_es() {
return native_es;
}
public String getStatAbility() {
return statAbility;
}
public String getColloq_SpellBlock() {
return colloq_SpellBlock;
}
public String getrFlatGoldPer10Mod() {
return rFlatGoldPer10Mod;
}
public String getFlatAttackSpeedMod() {
return FlatAttackSpeedMod;
}
public String getMobileNews() {
return mobileNews;
}
public String getLore() {
return Lore;
}
public String getFlatHPPoolMod() {
return FlatHPPoolMod;
}
public String getNative_zh_TW() {
return native_zh_TW;
}
public String getPercentAttackSpeedMod() {
return PercentAttackSpeedMod;
}
public String getrFlatDodgeMod() {
return rFlatDodgeMod;
}
public String getNative_vn() {
return native_vn;
}
public String getMap12() {
return Map12;
}
public String getFlatCritDamageMod() {
return FlatCritDamageMod;
}
public String getrFlatMovementSpeedModPerLevel() {
return rFlatMovementSpeedModPerLevel;
}
public String getMap10() {
return Map10;
}
public String getrFlatMPModPerLevel() {
return rFlatMPModPerLevel;
}
public String getNative_el() {
return native_el;
}
public String getColloq_MagicPenetration() {
return colloq_MagicPenetration;
}
public String getMarksman() {
return Marksman;
}
public String getBuilds_() {
return Builds_;
}
public String getContinue() {
return Continue;
}
public String getHealth() {
return Health;
}
public String getFlatHPRegenMod() {
return FlatHPRegenMod;
}
public String getrFlatTimeDeadMod() {
return rFlatTimeDeadMod;
}
public String getNative_th() {
return native_th;
}
public String getColloq_Active() {
return colloq_Active;
}
public String getMobileForum() {
return mobileForum;
}
public String getColloq_HealthRegen() {
return colloq_HealthRegen;
}
public String getColloq_Damage() {
return colloq_Damage;
}
public String getColloq_Aura() {
return colloq_Aura;
}
public String getAssassin() {
return Assassin;
}
public String getNative_de() {
return native_de;
}
public String getSpells_Self() {
return spells_Self;
}
public String getPrimaryRole() {
return PrimaryRole;
}
public String getRank_() {
return Rank_;
}
public String getSpecialRecipeLarge() {
return SpecialRecipeLarge;
}
public String getCost_() {
return Cost_;
}
public String getCooldownReduction() {
return CooldownReduction;
}
public String getrPercentAttackSpeedModPerLevel() {
return rPercentAttackSpeedModPerLevel;
}
public String getNative_cs() {
return native_cs;
}
public String getNative_ar() {
return native_ar;
}
public String getColloq_AttackSpeed() {
return colloq_AttackSpeed;
}
public String getSellsFor_() {
return SellsFor_;
}
public String getrPercentArmorPenetrationMod() {
return rPercentArmorPenetrationMod;
}
public String getrFlatMagicPenetrationModPerLevel() {
return rFlatMagicPenetrationModPerLevel;
}
public String getNative_tr() {
return native_tr;
}
public String getrPercentCooldownModPerLevel() {
return rPercentCooldownModPerLevel;
}
public String getCategoryItem() {
return categoryItem;
}
public String getConsumable() {
return Consumable;
}
public String getStats() {
return Stats;
}
public String getColloq_LifeSteal() {
return colloq_LifeSteal;
}
public String getTips() {
return Tips;
}
public String getStatDefense() {
return statDefense;
}
public String getPercentMPRegenMod() {
return PercentMPRegenMod;
}
public String getMasteryOffense() {
return masteryOffense;
}
public String getSpellBlock() {
return SpellBlock;
}
public String getPercentPhysicalDamageMod() {
return PercentPhysicalDamageMod;
}
public String getFlatCritChanceMod() {
return FlatCritChanceMod;
}
public String getPercentSpellBlockMod() {
return PercentSpellBlockMod;
}
public String getNative_id() {
return native_id;
}
public String getNative_ja() {
return native_ja;
}
public String getrFlatHPModPerLevel() {
return rFlatHPModPerLevel;
}
public String getDefense() {
return Defense;
}
public String getColloq_SpellDamage() {
return colloq_SpellDamage;
}
public String getrPercentMovementSpeedModPerLevel() {
return rPercentMovementSpeedModPerLevel;
}
public String getNative_it() {
return native_it;
}
public String getColloq_Health() {
return colloq_Health;
}
public String getSlow() {
return Slow;
}
public String getActive() {
return Active;
}
public String getRequire_() {
return Require_;
}
public String getPercentMagicDamageMod() {
return PercentMagicDamageMod;
}
public String getMap1() {
return Map1;
}
public String getPlayingAs() {
return PlayingAs;
}
public String getFlatMPPoolMod() {
return FlatMPPoolMod;
}
public String getFlatPhysicalDamageMod() {
return FlatPhysicalDamageMod;
}
public String getrFlatPhysicalDamageModPerLevel() {
return rFlatPhysicalDamageModPerLevel;
}
public String getManaRegen() {
return ManaRegen;
}
public String getRange_() {
return Range_;
}
public String getCategoryRune() {
return categoryRune;
}
public String getMap8() {
return Map8;
}
public String getMana() {
return Mana;
}
public String getrFlatCritChanceModPerLevel() {
return rFlatCritChanceModPerLevel;
}
public String getrPercentMagicPenetrationModPerLevel() {
return rPercentMagicPenetrationModPerLevel;
}
public String getLifeSteal() {
return LifeSteal;
}
public String getSpells_target_100() {
return spells_target_100;
}
public String getFlatArmorMod() {
return FlatArmorMod;
}
public String getrPercentMagicPenetrationMod() {
return rPercentMagicPenetrationMod;
}
public String getTenacity() {
return Tenacity;
}
public String getDetails_() {
return Details_;
}
public String getPercentDodgeMod() {
return PercentDodgeMod;
}
public String getStatUtility() {
return statUtility;
}
public String getMagic() {
return Magic;
}
public String getModeTutorial() {
return modeTutorial;
}
public String getSpecialRecipeSmall() {
return SpecialRecipeSmall;
}
public String getColloq_CriticalStrike() {
return colloq_CriticalStrike;
}
public String getNative_zh_CN() {
return native_zh_CN;
}
public String getFlatMovementSpeedMod() {
return FlatMovementSpeedMod;
}
public String getModeFirstblood() {
return modeFirstblood;
}
public String getNative_hu() {
return native_hu;
}
public String getColloq_ArmorPenetration() {
return colloq_ArmorPenetration;
}
public String getHealthRegen() {
return HealthRegen;
}
public String getrFlatArmorModPerLevel() {
return rFlatArmorModPerLevel;
}
public String getColloq_NonbootsMovement() {
return colloq_NonbootsMovement;
}
public String getUpgradeCost_() {
return UpgradeCost_;
}
public String getBack() {
return Back;
}
public String getSpells_target_6() {
return spells_target_6;
}
public String getrFlatMagicDamageModPerLevel() {
return rFlatMagicDamageModPerLevel;
}
public String getSpells_target_7() {
return spells_target_7;
}
public String getSpells_target_8() {
return spells_target_8;
}
public String getSpells_target_2() {
return spells_target_2;
}
public String getSpells_target_3() {
return spells_target_3;
}
public String getSpells_target_4() {
return spells_target_4;
}
public String getSpells_target_5() {
return spells_target_5;
}
public String getNative_zh_MY() {
return native_zh_MY;
}
public String getSpells_target_0() {
return spells_target_0;
}
public String getPlayingAgainst() {
return PlayingAgainst;
}
public String getSpells_target_1() {
return spells_target_1;
}
public String getFlatMPRegenMod() {
return FlatMPRegenMod;
}
public String getColloq_GoldPer() {
return colloq_GoldPer;
}
public String getNative_nl() {
return native_nl;
}
public String getrFlatHPRegenModPerLevel() {
return rFlatHPRegenModPerLevel;
}
public String getColloq_Attack() {
return colloq_Attack;
}
public String getMovement() {
return Movement;
}
public String getrPercentTimeDeadMod() {
return rPercentTimeDeadMod;
}
public String getFlatEnergyRegenMod() {
return FlatEnergyRegenMod;
}
public String getModeAram() {
return modeAram;
}
public String getPercentHPRegenMod() {
return PercentHPRegenMod;
}
public String getColloq_Magic() {
return colloq_Magic;
}
public String getrFlatArmorPenetrationMod() {
return rFlatArmorPenetrationMod;
}
public String getMobileFriends() {
return mobileFriends;
}
public String getAllItems() {
return AllItems;
}
public String getAbilities() {
return Abilities;
}
public String getChampionInfo() {
return ChampionInfo;
}
public String getNative_ko() {
return native_ko;
}
public String getrFlatCritDamageModPerLevel() {
return rFlatCritDamageModPerLevel;
}
public String getMode6v6() {
return mode6v6;
}
public String getAura() {
return Aura;
}
public String getMobilePleaseWait() {
return mobilePleaseWait;
}
public String getrFlatDodgeModPerLevel() {
return rFlatDodgeModPerLevel;
}
public String getStatDifficulty() {
return statDifficulty;
}
public String getModeOdin() {
return modeOdin;
}
public String getSupport() {
return Support;
}
public String getPercentLifeStealMod() {
return PercentLifeStealMod;
}
public String getrPercentTimeDeadModPerLevel() {
return rPercentTimeDeadModPerLevel;
}
public String getAttackSpeed() {
return AttackSpeed;
}
public String getNative_zh() {
return native_zh;
}
public String getSpellVamp() {
return SpellVamp;
}
public String getItemInfo() {
return ItemInfo;
}
public String getPercentHPPoolMod() {
return PercentHPPoolMod;
}
public String getNonbootsMovement() {
return NonbootsMovement;
}
public String getColloq_Boots() {
return colloq_Boots;
}
public String getrFlatArmorPenetrationModPerLevel() {
return rFlatArmorPenetrationModPerLevel;
}
public String getArmor() {
return Armor;
}
public String getFlatMagicDamageMod() {
return FlatMagicDamageMod;
}
public String getModeClassic() {
return modeClassic;
}
public String getLevel() {
return Level;
}
public String getCategoryMastery() {
return categoryMastery;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.qpid.jms.integration;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.jms.IllegalStateRuntimeException;
import javax.jms.JMSConsumer;
import javax.jms.JMSContext;
import javax.jms.JMSRuntimeException;
import javax.jms.Message;
import javax.jms.MessageFormatRuntimeException;
import javax.jms.MessageListener;
import javax.jms.Queue;
import org.apache.qpid.jms.provider.amqp.message.AmqpMessageSupport;
import org.apache.qpid.jms.test.QpidJmsTestCase;
import org.apache.qpid.jms.test.Wait;
import org.apache.qpid.jms.test.testpeer.TestAmqpPeer;
import org.apache.qpid.jms.test.testpeer.basictypes.AmqpError;
import org.apache.qpid.jms.test.testpeer.describedtypes.sections.AmqpValueDescribedType;
import org.apache.qpid.jms.test.testpeer.describedtypes.sections.DataDescribedType;
import org.apache.qpid.jms.test.testpeer.describedtypes.sections.MessageAnnotationsDescribedType;
import org.apache.qpid.jms.test.testpeer.describedtypes.sections.PropertiesDescribedType;
import org.apache.qpid.proton.amqp.Binary;
import org.apache.qpid.proton.amqp.DescribedType;
import org.apache.qpid.proton.amqp.UnsignedInteger;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Integration tests for {@link JMSConsumer} behaviour against a scripted AMQP test peer:
 * consumer creation and remote close, receive timeout semantics, auto-start behaviour,
 * sync-receive vs. listener interaction, and {@code receiveBody} type conversion and
 * acknowledgement handling.
 */
public class JMSConsumerIntegrationTest extends QpidJmsTestCase {

    private static final Logger LOG = LoggerFactory.getLogger(JMSConsumerIntegrationTest.class);

    private final IntegrationTestFixture testFixture = new IntegrationTestFixture();

    @Test(timeout = 20000)
    public void testCreateConsumer() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlow();

            Queue queue = context.createQueue("test");
            JMSConsumer consumer = context.createConsumer(queue);
            assertNotNull(consumer);

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testRemotelyCloseJMSConsumer() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            // Create a consumer, then remotely end it afterwards.
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlow();
            testPeer.remotelyDetachLastOpenedLinkOnLastOpenedSession(true, true, AmqpError.RESOURCE_DELETED, "resource closed");

            Queue queue = context.createQueue("myQueue");
            final JMSConsumer consumer = context.createConsumer(queue);

            // Verify the consumer gets marked closed
            testPeer.waitForAllHandlersToComplete(1000);
            assertTrue("JMSConsumer never closed.", Wait.waitFor(new Wait.Condition() {
                @Override
                public boolean isSatisfied() throws Exception {
                    try {
                        consumer.getMessageListener();
                    } catch (IllegalStateRuntimeException jmsise) {
                        // The closed consumer rejects further use with ISRE.
                        return true;
                    }
                    return false;
                }
            }, 10000, 10));

            // Try closing it explicitly, should effectively no-op in client.
            // The test peer will throw during close if it sends anything.
            consumer.close();

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveMessageWithReceiveZeroTimeout() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue queue = context.createQueue("myQueue");

            DescribedType amqpValueNullContent = new AmqpValueDescribedType(null);
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, amqpValueNullContent);
            testPeer.expectDispositionThatIsAcceptedAndSettled();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            // receive(0) blocks indefinitely; the peer already has a transfer queued so it returns.
            Message receivedMessage = messageConsumer.receive(0);
            assertNotNull("A message should have been recieved", receivedMessage);

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(2000);
        }
    }

    @Test(timeout = 20000)
    public void testConsumerReceiveNoWaitThrowsIfConnectionLost() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue queue = context.createQueue("queue");
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlow(false, notNullValue(UnsignedInteger.class));
            testPeer.expectLinkFlow(true, notNullValue(UnsignedInteger.class));
            // Drop the connection once the drain request arrives, receiveNoWait must surface it.
            testPeer.dropAfterLastHandler();

            final JMSConsumer consumer = context.createConsumer(queue);

            try {
                consumer.receiveNoWait();
                fail("An exception should have been thrown");
            } catch (JMSRuntimeException jmsre) {
                // Expected
            }

            try {
                context.close();
            } catch (Throwable ignored) {
                // Connection already failed; close errors are irrelevant here.
            }
        }
    }

    @Test(timeout = 20000)
    public void testNoReceivedMessagesWhenConnectionNotStarted() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            context.setAutoStart(false);

            testPeer.expectBegin();

            Queue destination = context.createQueue(getTestName());
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, new AmqpValueDescribedType("content"), 3);
            testPeer.expectDispositionThatIsAcceptedAndSettled();

            JMSConsumer consumer = context.createConsumer(destination);

            // Prefetched messages must not be delivered before start().
            assertNull(consumer.receive(100));
            context.start();
            assertNotNull(consumer.receive(2000));

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(2000);
        }
    }

    @Test(timeout = 60000)
    public void testSyncReceiveFailsWhenListenerSet() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue destination = context.createQueue(getTestName());
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlow();

            JMSConsumer consumer = context.createConsumer(destination);
            consumer.setMessageListener(new MessageListener() {
                @Override
                public void onMessage(Message m) {
                    LOG.warn("Async consumer got unexpected Message: {}", m);
                }
            });

            // All three synchronous receive variants must reject use while a listener is set.
            try {
                consumer.receive();
                fail("Should have thrown an exception.");
            } catch (JMSRuntimeException ex) {
            }

            try {
                consumer.receive(1000);
                fail("Should have thrown an exception.");
            } catch (JMSRuntimeException ex) {
            }

            try {
                consumer.receiveNoWait();
                fail("Should have thrown an exception.");
            } catch (JMSRuntimeException ex) {
            }

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(2000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveBodyMapMessage() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue queue = context.createQueue("myQueue");

            // Prepare an AMQP message for the test peer to send, containing an
            // AmqpValue section holding a map with entries for each supported type,
            // and annotated as a JMS map message.
            String myBoolKey = "myBool";
            boolean myBool = true;
            String myByteKey = "myByte";
            byte myByte = 4;
            String myBytesKey = "myBytes";
            byte[] myBytes = myBytesKey.getBytes();
            String myCharKey = "myChar";
            char myChar = 'd';
            String myDoubleKey = "myDouble";
            double myDouble = 1234567890123456789.1234;
            String myFloatKey = "myFloat";
            float myFloat = 1.1F;
            String myIntKey = "myInt";
            int myInt = Integer.MAX_VALUE;
            String myLongKey = "myLong";
            long myLong = Long.MAX_VALUE;
            String myShortKey = "myShort";
            short myShort = 25;
            String myStringKey = "myString";
            String myString = myStringKey;

            Map<String, Object> map = new LinkedHashMap<String, Object>();
            map.put(myBoolKey, myBool);
            map.put(myByteKey, myByte);
            map.put(myBytesKey, new Binary(myBytes));// the underlying AMQP message uses Binary rather than byte[] directly.
            map.put(myCharKey, myChar);
            map.put(myDoubleKey, myDouble);
            map.put(myFloatKey, myFloat);
            map.put(myIntKey, myInt);
            map.put(myLongKey, myLong);
            map.put(myShortKey, myShort);
            map.put(myStringKey, myString);

            MessageAnnotationsDescribedType msgAnnotations = new MessageAnnotationsDescribedType();
            msgAnnotations.setSymbolKeyedAnnotation(AmqpMessageSupport.JMS_MSG_TYPE.toString(), AmqpMessageSupport.JMS_MAP_MESSAGE);

            DescribedType amqpValueSectionContent = new AmqpValueDescribedType(map);

            // receive the message from the test peer
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, msgAnnotations, null, null, amqpValueSectionContent);
            testPeer.expectDispositionThatIsAcceptedAndSettled();
            testPeer.expectEnd();
            testPeer.expectClose();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            @SuppressWarnings("unchecked")
            Map<String, Object> receivedMap = messageConsumer.receiveBody(Map.class, 3000);

            // verify the content is as expected
            assertNotNull("Map was not received", receivedMap);
            assertEquals("Unexpected boolean value", myBool, receivedMap.get(myBoolKey));
            assertEquals("Unexpected byte value", myByte, receivedMap.get(myByteKey));
            byte[] readBytes = (byte[]) receivedMap.get(myBytesKey);
            assertTrue("Read bytes were not as expected: " + Arrays.toString(readBytes), Arrays.equals(myBytes, readBytes));
            assertEquals("Unexpected char value", myChar, receivedMap.get(myCharKey));
            assertEquals("Unexpected double value", myDouble, (double) receivedMap.get(myDoubleKey), 0.0);
            assertEquals("Unexpected float value", myFloat, (float) receivedMap.get(myFloatKey), 0.0);
            assertEquals("Unexpected int value", myInt, receivedMap.get(myIntKey));
            assertEquals("Unexpected long value", myLong, receivedMap.get(myLongKey));
            assertEquals("Unexpected short value", myShort, receivedMap.get(myShortKey));
            assertEquals("Unexpected UTF value", myString, receivedMap.get(myStringKey));

            context.close();
            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveBodyTextMessage() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            final String content = "Message-Content";
            Queue queue = context.createQueue("myQueue");

            DescribedType amqpValueContent = new AmqpValueDescribedType(content);
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, amqpValueContent);
            testPeer.expectDispositionThatIsAcceptedAndSettled();
            testPeer.expectEnd();
            testPeer.expectClose();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            String received = messageConsumer.receiveBody(String.class, 3000);

            assertNotNull(received);
            assertEquals(content, received);

            context.close();
            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveBodyObjectMessage() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue queue = context.createQueue("myQueue");

            PropertiesDescribedType properties = new PropertiesDescribedType();
            properties.setContentType(AmqpMessageSupport.SERIALIZED_JAVA_OBJECT_CONTENT_TYPE);

            // Java-serialize the expected payload, as an ObjectMessage body arrives as a
            // Data section holding the serialized form.
            String expectedContent = "expectedContent";
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(baos);
            oos.writeObject(expectedContent);
            oos.flush();
            oos.close();
            byte[] bytes = baos.toByteArray();

            MessageAnnotationsDescribedType msgAnnotations = new MessageAnnotationsDescribedType();
            msgAnnotations.setSymbolKeyedAnnotation(AmqpMessageSupport.JMS_MSG_TYPE.toString(), AmqpMessageSupport.JMS_OBJECT_MESSAGE);

            DescribedType dataContent = new DataDescribedType(new Binary(bytes));

            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, msgAnnotations, properties, null, dataContent);
            testPeer.expectDispositionThatIsAcceptedAndSettled();
            testPeer.expectEnd();
            testPeer.expectClose();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            String received = messageConsumer.receiveBody(String.class, 3000);

            assertNotNull(received);
            assertEquals(expectedContent, received);

            context.close();
            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveBodyBytesMessage() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue queue = context.createQueue("myQueue");

            PropertiesDescribedType properties = new PropertiesDescribedType();
            properties.setContentType(AmqpMessageSupport.OCTET_STREAM_CONTENT_TYPE);

            MessageAnnotationsDescribedType msgAnnotations = null;
            msgAnnotations = new MessageAnnotationsDescribedType();
            msgAnnotations.setSymbolKeyedAnnotation(AmqpMessageSupport.JMS_MSG_TYPE.toString(), AmqpMessageSupport.JMS_BYTES_MESSAGE);

            final byte[] expectedContent = "expectedContent".getBytes();
            DescribedType dataContent = new DataDescribedType(new Binary(expectedContent));

            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, msgAnnotations, properties, null, dataContent);
            testPeer.expectDispositionThatIsAcceptedAndSettled();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            byte[] received = messageConsumer.receiveBody(byte[].class, 3000);

            testPeer.waitForAllHandlersToComplete(3000);

            assertNotNull(received);
            assertTrue(Arrays.equals(expectedContent, received));

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveBodyFailsDoesNotAcceptMessageAutoAck() throws Exception {
        doTestReceiveBodyFailsDoesNotAcceptMessage(JMSContext.AUTO_ACKNOWLEDGE);
    }

    @Test(timeout = 20000)
    public void testReceiveBodyFailsDoesNotAcceptMessageDupsOk() throws Exception {
        doTestReceiveBodyFailsDoesNotAcceptMessage(JMSContext.DUPS_OK_ACKNOWLEDGE);
    }

    @Test(timeout = 20000)
    public void testReceiveBodyFailsDoesNotAcceptMessageClientAck() throws Exception {
        doTestReceiveBodyFailsDoesNotAcceptMessage(JMSContext.CLIENT_ACKNOWLEDGE);
    }

    /**
     * Verifies a failed {@code receiveBody} (wrong type requested) does not accept/settle
     * the message: no disposition is expected by the peer before End/Close.
     *
     * NOTE(review): {@code sessionMode} is currently unused -- presumably the context should
     * be created with this mode (e.g. an overload of {@code createJMSContext} taking the
     * session mode). Confirm against IntegrationTestFixture's API.
     */
    public void doTestReceiveBodyFailsDoesNotAcceptMessage(int sessionMode) throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            final String content = "Message-Content";
            Queue queue = context.createQueue("myQueue");

            DescribedType amqpValueContent = new AmqpValueDescribedType(content);
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, amqpValueContent);
            testPeer.expectEnd();
            testPeer.expectClose();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            try {
                messageConsumer.receiveBody(Boolean.class, 3000);
                fail("Should not read as Boolean type");
            } catch (MessageFormatRuntimeException mfre) {
                // Expected: String body cannot be returned as Boolean.
            }

            context.close();
            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    // Bug fix: these three tests previously invoked doTestReceiveBodyFailsDoesNotAcceptMessage,
    // duplicating the tests above and leaving doTestReceiveBodyFailsThenCalledWithCorrectType
    // (the helper matching their names) completely unused. They now exercise the intended
    // "fail first, then retry with the correct type" scenario.
    @Test(timeout = 20000)
    public void testReceiveBodyFailsThenAcceptsOnSuccessfullyNextCallAutoAck() throws Exception {
        doTestReceiveBodyFailsThenCalledWithCorrectType(JMSContext.AUTO_ACKNOWLEDGE);
    }

    @Test(timeout = 20000)
    public void testReceiveBodyFailsThenAcceptsOnSuccessfullyNextCallDupsOk() throws Exception {
        doTestReceiveBodyFailsThenCalledWithCorrectType(JMSContext.DUPS_OK_ACKNOWLEDGE);
    }

    @Test(timeout = 20000)
    public void testReceiveBodyFailsThenGetNullOnNextAttemptClientAck() throws Exception {
        doTestReceiveBodyFailsThenCalledWithCorrectType(JMSContext.CLIENT_ACKNOWLEDGE);
    }

    /**
     * After a failed {@code receiveBody} with the wrong type, a retry with the correct type
     * should succeed (and accept the message) in auto-ack / dups-ok modes, while in
     * client-ack mode the message is no longer available and {@code null} is returned.
     *
     * NOTE(review): as above, {@code sessionMode} only drives the expectations here and is
     * not passed to the context -- confirm the fixture supports creating a context in the
     * requested mode.
     */
    public void doTestReceiveBodyFailsThenCalledWithCorrectType(int sessionMode) throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            final String content = "Message-Content";
            Queue queue = context.createQueue("myQueue");

            DescribedType amqpValueContent = new AmqpValueDescribedType(content);
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, amqpValueContent);

            JMSConsumer messageConsumer = context.createConsumer(queue);
            try {
                messageConsumer.receiveBody(Boolean.class, 3000);
                fail("Should not read as Boolean type");
            } catch (MessageFormatRuntimeException mfre) {
                // Expected: first attempt uses the wrong body type.
            }

            testPeer.waitForAllHandlersToComplete(3000);

            if (sessionMode == JMSContext.AUTO_ACKNOWLEDGE ||
                sessionMode == JMSContext.DUPS_OK_ACKNOWLEDGE) {
                testPeer.expectDispositionThatIsAcceptedAndSettled();
            }

            String received = messageConsumer.receiveBody(String.class, 3000);

            if (sessionMode == JMSContext.AUTO_ACKNOWLEDGE ||
                sessionMode == JMSContext.DUPS_OK_ACKNOWLEDGE) {
                assertNotNull(received);
                assertEquals(content, received);
            } else {
                assertNull(received);
            }

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    @Test(timeout = 20000)
    public void testReceiveBodyBytesMessageFailsWhenWrongTypeRequested() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectBegin();

            Queue queue = context.createQueue("myQueue");

            PropertiesDescribedType properties = new PropertiesDescribedType();
            properties.setContentType(AmqpMessageSupport.OCTET_STREAM_CONTENT_TYPE);

            MessageAnnotationsDescribedType msgAnnotations = null;
            msgAnnotations = new MessageAnnotationsDescribedType();
            msgAnnotations.setSymbolKeyedAnnotation(AmqpMessageSupport.JMS_MSG_TYPE.toString(), AmqpMessageSupport.JMS_BYTES_MESSAGE);

            final byte[] expectedContent = "expectedContent".getBytes();
            DescribedType dataContent = new DataDescribedType(new Binary(expectedContent));

            // Two copies of the message: each is eventually read with a matching type
            // after a failed attempt with a wrong one, so two dispositions are expected.
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, msgAnnotations, properties, null, dataContent, 2);
            testPeer.expectDispositionThatIsAcceptedAndSettled();
            testPeer.expectDispositionThatIsAcceptedAndSettled();

            JMSConsumer messageConsumer = context.createConsumer(queue);
            try {
                messageConsumer.receiveBody(String.class, 3000);
                fail("Should not read as String type");
            } catch (MessageFormatRuntimeException mfre) {
            }

            byte[] received1 = messageConsumer.receiveBody(byte[].class, 3000);

            try {
                messageConsumer.receiveBody(Map.class, 3000);
                fail("Should not read as Map type");
            } catch (MessageFormatRuntimeException mfre) {
            }

            byte[] received2 = (byte[]) messageConsumer.receiveBody(Object.class, 3000);

            testPeer.waitForAllHandlersToComplete(3000);

            assertNotNull(received1);
            assertNotNull(received2);
            assertTrue(Arrays.equals(expectedContent, received1));
            assertTrue(Arrays.equals(expectedContent, received2));

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }
}
|
|
/* Copyright (c) 2013-2014 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Johnathan Garrett (LMN Solutions) - initial implementation
*/
package org.locationtech.geogig.remote;
import static org.locationtech.geogig.storage.datastream.FormatCommonV1.readObjectId;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.eclipse.jdt.annotation.Nullable;
import org.locationtech.geogig.api.NodeRef;
import org.locationtech.geogig.api.ObjectId;
import org.locationtech.geogig.api.RevObject;
import org.locationtech.geogig.api.plumbing.diff.DiffEntry;
import org.locationtech.geogig.repository.Repository;
import org.locationtech.geogig.storage.BulkOpListener;
import org.locationtech.geogig.storage.BulkOpListener.CountingListener;
import org.locationtech.geogig.storage.ObjectStore;
import org.locationtech.geogig.storage.datastream.DataStreamSerializationFactoryV1;
import org.locationtech.geogig.storage.datastream.FormatCommonV1;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.AbstractIterator;
import com.google.common.io.CountingOutputStream;
/**
* Provides a method of packing a set of changes and the affected objects to and from a binary
* stream.
*/
public final class BinaryPackedChanges {

    private static final Logger LOGGER = LoggerFactory.getLogger(BinaryPackedChanges.class);

    private static final DataStreamSerializationFactoryV1 serializer = DataStreamSerializationFactoryV1.INSTANCE;

    private final Repository repository;

    // Set by ingest() when the incoming stream ends with a non-zero "filter applied" marker.
    private boolean filtered;

    /**
     * Marker written before each packet telling the reader which sections follow the marker:
     * just a diff, an object + diff, metadata + object + diff, or the end-of-stream filter flag.
     */
    private static enum CHUNK_TYPE {
        DIFF_ENTRY {
            @Override
            public int value() {
                return 0;
            }
        },
        OBJECT_AND_DIFF_ENTRY {
            @Override
            public int value() {
                return 1;
            }
        },
        METADATA_OBJECT_AND_DIFF_ENTRY {
            @Override
            public int value() {
                return 2;
            }
        },
        FILTER_FLAG {
            @Override
            public int value() {
                return 3;
            }
        };

        public abstract int value();

        private static final CHUNK_TYPE[] values = CHUNK_TYPE.values();

        public static CHUNK_TYPE valueOf(int value) {
            // abusing the fact that value() coincides with ordinal()
            return values[value];
        }
    };

    /**
     * Constructs a new {@code BinaryPackedChanges} instance using the provided {@link Repository}.
     *
     * @param repository the repository to save objects to, or read objects from, depending on the
     *        operation
     */
    public BinaryPackedChanges(Repository repository) {
        this.repository = repository;
        filtered = false;
    }

    /**
     * @return whether the stream most recently read by {@link #ingest} was produced with
     *         filters applied on the remote side
     */
    public boolean wasFiltered() {
        return filtered;
    }

    /**
     * Writes the set of changes to the provided output stream.
     *
     * @param out the stream to write to
     * @param changes the changes to write
     * @throws IOException
     * @return the number of objects written
     */
    public long write(OutputStream out, Iterator<DiffEntry> changes) throws IOException {
        final ObjectStore objectDatabase = repository.objectDatabase();
        out = new CountingOutputStream(out);

        // avoids sending the same metadata object multiple times
        Set<ObjectId> writtenMetadataIds = new HashSet<ObjectId>();

        // buffer to avoid ObjectId cloning its internal state for each object
        byte[] oidbuffer = new byte[ObjectId.NUM_BYTES];

        long objectCount = 0;

        while (changes.hasNext()) {
            DiffEntry diff = changes.next();

            if (diff.isDelete()) {
                out.write(CHUNK_TYPE.DIFF_ENTRY.value());
            } else {
                // its a change or an addition, new object is guaranteed to be present
                NodeRef newObject = diff.getNewObject();
                ObjectId metadataId = newObject.getMetadataId();
                if (writtenMetadataIds.contains(metadataId)) {
                    out.write(CHUNK_TYPE.OBJECT_AND_DIFF_ENTRY.value());
                } else {
                    out.write(CHUNK_TYPE.METADATA_OBJECT_AND_DIFF_ENTRY.value());
                    RevObject metadata = objectDatabase.get(metadataId);
                    writeObjectId(metadataId, out, oidbuffer);
                    serializer.write(metadata, out);
                    writtenMetadataIds.add(metadataId);
                    objectCount++;
                }
                ObjectId objectId = newObject.getObjectId();
                writeObjectId(objectId, out, oidbuffer);
                RevObject object = objectDatabase.get(objectId);
                serializer.write(object, out);
                objectCount++;
            }
            DataOutput dataOut = new DataOutputStream(out);
            FormatCommonV1.writeDiff(diff, dataOut);
        }

        // signal the end of changes
        out.write(CHUNK_TYPE.FILTER_FLAG.value());
        final boolean filtersApplied = changes instanceof FilteredDiffIterator
                && ((FilteredDiffIterator) changes).wasFiltered();
        out.write(filtersApplied ? 1 : 0);

        LOGGER.info(String.format("Written %,d bytes to remote accounting for %,d objects.",
                ((CountingOutputStream) out).getCount(), objectCount));
        return objectCount;
    }

    // Writes the raw 20-byte id through the shared buffer to avoid per-object allocation.
    private void writeObjectId(ObjectId objectId, OutputStream out, byte[] oidbuffer)
            throws IOException {
        objectId.getRawValue(oidbuffer);
        out.write(oidbuffer);
    }

    /**
     * Read in the changes from the provided input stream and call the provided callback for each
     * change. The input stream represents the output of another {@code BinaryPackedChanges}
     * instance.
     *
     * @param in the stream to read from
     * @param callback the callback to call for each item
     */
    public void ingest(final InputStream in, Callback callback) {
        PacketReadingIterator readingIterator = new PacketReadingIterator(in);

        Iterator<RevObject> asObjects = asObjects(readingIterator, callback);

        ObjectStore objectDatabase = repository.objectDatabase();

        CountingListener listener = BulkOpListener.newCountingListener();
        objectDatabase.putAll(asObjects, listener);

        // Bug fix: this message previously used printf-style "%,d" placeholders with SLF4J,
        // which only substitutes "{}" -- the counts were never formatted into the output.
        // Use String.format, matching the logging style used in write().
        LOGGER.info(String.format("Ingested %,d objects. Inserted: %,d. Already existing: %,d",
                listener.inserted() + listener.found(), listener.inserted(), listener.found()));

        this.filtered = readingIterator.isFiltered();
    }

    /**
     * Returns an iterator that calls the {@code callback} for each {@link DiffPacket}'s
     * {@link DiffEntry} once, and returns either zero, one, or two {@link RevObject}s, depending on
     * which information the diff packet carried over.
     */
    private Iterator<RevObject> asObjects(final PacketReadingIterator readingIterator,
            final Callback callback) {
        return new AbstractIterator<RevObject>() {

            // Holds a packet whose metadata object still has to be emitted after its new object.
            private DiffPacket current;

            @Override
            protected RevObject computeNext() {
                if (current != null) {
                    Preconditions.checkState(current.metadataObject != null);
                    RevObject ret = current.metadataObject;
                    current = null;
                    return ret;
                }
                while (readingIterator.hasNext()) {
                    DiffPacket diffPacket = readingIterator.next();
                    callback.callback(diffPacket.entry);
                    RevObject obj = diffPacket.newObject;
                    RevObject md = diffPacket.metadataObject;
                    // metadata can only be present when a new object is present
                    Preconditions.checkState(obj != null || (obj == null && md == null));
                    if (obj != null) {
                        if (md != null) {
                            current = diffPacket;
                        }
                        return obj;
                    }
                }
                return endOfData();
            }
        };
    }

    /** One decoded change: the diff entry plus the (optional) new object and metadata object. */
    private static class DiffPacket {

        public final DiffEntry entry;

        @Nullable
        public final RevObject newObject;

        @Nullable
        public final RevObject metadataObject;

        public DiffPacket(DiffEntry entry, @Nullable RevObject newObject,
                @Nullable RevObject metadata) {
            this.entry = entry;
            this.newObject = newObject;
            this.metadataObject = metadata;
        }
    }

    /** Decodes {@link DiffPacket}s from the wire format produced by {@link #write}. */
    private static class PacketReadingIterator extends AbstractIterator<DiffPacket> {

        private final InputStream in;

        private final DataInput data;

        private boolean filtered;

        public PacketReadingIterator(InputStream in) {
            this.in = in;
            this.data = new DataInputStream(in);
        }

        /**
         * @return {@code true} if the stream finished with a non zero "filter applied" marker
         */
        public boolean isFiltered() {
            return filtered;
        }

        @Override
        protected DiffPacket computeNext() {
            try {
                return readNext();
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
        }

        private DiffPacket readNext() throws IOException {
            final CHUNK_TYPE chunkType = CHUNK_TYPE.valueOf((int) (data.readByte() & 0xFF));
            RevObject revObj = null;
            RevObject metadata = null;

            switch (chunkType) {
            case DIFF_ENTRY:
                break;
            case OBJECT_AND_DIFF_ENTRY: {
                ObjectId id = readObjectId(data);
                revObj = serializer.read(id, in);
            }
                break;
            case METADATA_OBJECT_AND_DIFF_ENTRY: {
                ObjectId mdid = readObjectId(data);
                metadata = serializer.read(mdid, in);
                ObjectId id = readObjectId(data);
                revObj = serializer.read(id, in);
            }
                break;
            case FILTER_FLAG: {
                int changesFiltered = in.read();
                if (changesFiltered != 0) {
                    filtered = true;
                }
                return endOfData();
            }
            default:
                throw new IllegalStateException("Unknown chunk type: " + chunkType);
            }

            DiffEntry diff = FormatCommonV1.readDiff(data);
            return new DiffPacket(diff, revObj, metadata);
        }
    }

    /**
     * Interface for callback methods to be used by the read and write operations.
     */
    public static interface Callback {
        public abstract void callback(DiffEntry diff);
    }
}
|
|
/*
* All files in the distribution of BLOAT (Bytecode Level Optimization and
* Analysis tool for Java(tm)) are Copyright 1997-2001 by the Purdue
* Research Foundation of Purdue University. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package EDU.purdue.cs.bloat.tree;
import EDU.purdue.cs.bloat.editor.Type;
import EDU.purdue.cs.bloat.util.Assert;
/**
* Expr is the superclass for a number of other classes representing expressions
* in byte code. Expressions are typed and may be nested.
*
* @see DefExpr
*/
public abstract class Expr extends Node implements Cloneable {
protected Type type; // The type (descriptor) of this expression
private DefExpr def; // The expression in which this expression
// is defined (if applicable)
// Equality wrapper created once per Expr; see comparator() and ExprComparator below.
private Object comparator;
/**
* Constructor. Initializes an expression with a given type.
*
* @param type
* The initial Type (descriptor) of this expression.
*/
public Expr(final Type type) {
this.def = null;
this.comparator = new ExprComparator();
this.type = type;
}
/**
* Sets the type of this expression. Returns whether or not the type changed
* as a result of calling this method.
*/
public boolean setType(final Type type) {
if (!this.type.equals(type)) {
// if (Tree.DEBUG) {
// System.out.println(" setting typeof(" + this + ") = " + type);
// }
this.type = type;
return true;
}
return false;
}
/**
* Returns whether or not this expression is a defining occurrence. By
* default, false is returned. Subclasses representing definitions
* (e.g. DefExpr) override this.
*/
public boolean isDef() {
return false;
}
/**
* Returns the statement to which this expression belongs. It essentially
* searches up the expression tree for this expression's first ancestor
* which is a Stmt. The assertions fire if the walk reaches the Tree root
* or falls off a null parent, i.e. if this Expr is not attached under a Stmt.
*/
public Stmt stmt() {
Node p = parent;
while (!(p instanceof Stmt)) {
Assert.isTrue(!(p instanceof Tree), "Invalid ancestor of " + this);
Assert.isTrue(p != null, "Null ancestor of " + this);
p = p.parent;
}
return (Stmt) p;
}
/**
* Returns the Type of this expression.
*/
public Type type() {
return type;
}
/**
* Cleans up this expression only, not its children. Clearing the def also
* removes this Expr from its defining DefExpr's use list (see setDef).
*/
public void cleanupOnly() {
setDef(null);
}
/**
* Sets the expression that defines this expression.
* Maintains the def/use bookkeeping: removes this from the old def's use
* list, and adds it to the new def's use list. A defining occurrence
* (isDef() == true) never records a def other than itself.
*
* @param def
* Defining expression.
*/
public void setDef(final DefExpr def) {
// if (Tree.DEBUG) {
// System.out.println(" setting def of " + this +
// " (" + System.identityHashCode(this) + ") to " + def);
// }
if (this.def == def) {
return;
}
// If this Expr already had a defining statement, remove this from the
// DefExpr use list.
if (this.def != null) {
this.def.removeUse(this);
}
if (this.isDef()) {
// A def may only be "defined by" itself or nothing.
Assert.isTrue((def == this) || (def == null));
this.def = null;
return;
}
this.def = def;
if (this.def != null) {
this.def.addUse(this); // This Expr is a use of def
}
}
/**
* Returns the expression in which this Expr is defined.
*/
public DefExpr def() {
return def;
}
/**
* Returns the hash code for this expresion.
*/
public abstract int exprHashCode();
/**
* Compares this expression to another.
*
* @param other
* Expr to which to compare this.
*/
public abstract boolean equalsExpr(Expr other);
public abstract Object clone();
/**
* Copies the contents of another expression in this one.
* Note the def link is deliberately NOT copied for defining occurrences:
* the copy starts with no def, while a plain use keeps pointing at the
* same defining expression (registering itself as a use via setDef).
*
* @param expr
* The expression from which to copy.
*/
protected Expr copyInto(Expr expr) {
expr = (Expr) super.copyInto(expr);
final DefExpr def = def();
if (isDef()) {
expr.setDef(null);
} else {
expr.setDef(def);
}
return expr;
}
/**
* Returns an Object that can be used to compare other Expr to this.
*/
public Object comparator() {
return comparator;
}
/**
* ExprComparator is used to provide a different notion of equality among
* expressions than the default ==. In most cases, we want ==, but
* occasionally we want the equalsExpr() functionality when inserting in
* Hashtables, etc. Two comparators are equal iff their expressions are
* equalsExpr()-equal AND have the same simple type.
*/
private class ExprComparator {
Expr expr = Expr.this;
public boolean equals(final Object obj) {
if (obj instanceof ExprComparator) {
final Expr other = ((ExprComparator) obj).expr;
return expr.equalsExpr(other)
&& expr.type.simple().equals(other.type.simple());
}
return false;
}
// Consistent with equals above: delegates to the expression's own hash.
public int hashCode() {
return Expr.this.exprHashCode();
}
}
}
|
|
/*
* ============================================================================
*
* File: SemiSpaceAdmin.java
*----------------------------------------------------------------------------
*
* Copyright 2008 Erlend Nossum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Description: See javadoc below
*
* Created: 16. feb.. 2008
* ============================================================================
*/
package org.semispace.admin;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.semispace.DistributedEvent;
import org.semispace.Holder;
import org.semispace.NameValueQuery;
import org.semispace.SemiSpace;
import org.semispace.SemiSpaceInterface;
import org.semispace.event.SemiAvailabilityEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.thoughtworks.xstream.XStream;
public class SemiSpaceAdmin implements SemiSpaceAdminInterface {
    private static final Logger log = LoggerFactory.getLogger(SemiSpaceAdmin.class);

    /** Whether this admin currently holds the master role. */
    private boolean master;

    /** The space this admin instance administers. */
    private SemiSpaceInterface space;

    /** Guards against running {@link #performInitialization()} twice. */
    private boolean beenInitialized;

    /** Local clock minus master clock, in ms; stays 0 while this space is master. */
    private long clockSkew;

    /** Identity of this space; 0 until initialization assigns a real id (&gt;= 1). */
    private int spaceId;

    /** Thread pool used for space-related background work. */
    private ExecutorService pool;

    /** JVM shutdown hook which shuts down the pool and the reaper. */
    private Thread shutDownHook;

    /** Periodic reaper of expired space elements. */
    private PeriodicHarvest periodicHarvest;

    public SemiSpaceAdmin(SemiSpaceInterface terraSpace) {
        ThreadPoolExecutor tpe = new ThreadPoolExecutor(0, 5000,
                5L, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>(true));
        // Daemon threads so the pool never blocks JVM exit on its own.
        tpe.setThreadFactory(new DaemonDelegateFactory(tpe.getThreadFactory()));
        // Exchanging strategy. When thread pool is full, try to run on local thread.
        tpe.setRejectedExecutionHandler(new SemiSpaceRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy()));
        tpe.allowCoreThreadTimeOut(true);
        this.pool = tpe;
        this.space = terraSpace;
        this.beenInitialized = false;
        this.clockSkew = 0;
        this.spaceId = 0;
        this.master = false;
        this.periodicHarvest = new PeriodicHarvest(this);
    }

    /**
     * Used from junit test.
     */
    protected int getSpaceId() {
        return spaceId;
    }

    /**
     * @return space configured for this admin. Beneficiary for subclasses.
     */
    protected SemiSpaceInterface getSpace() {
        return space;
    }

    @Override
    public ExecutorService getThreadPool() {
        return pool;
    }

    @Override
    public boolean hasBeenInitialized() {
        return this.beenInitialized;
    }

    @Override
    public boolean isMaster() {
        return this.master;
    }

    /**
     * Local time adjusted by the clock skew obtained from the master space.
     */
    @Override
    public long calculateTime() {
        return System.currentTimeMillis() - clockSkew;
    }

    /**
     * Kick-starts the space connection, figures out this space's id,
     * queries for the master time and starts the periodic reaper.
     * Safe to call more than once; subsequent calls only log a warning.
     */
    @Override
    public void performInitialization() {
        if (beenInitialized) {
            log.warn("Initialization called more than once.");
            return;
        }
        beenInitialized = true;
        Runnable hook = new Runnable() {
            @Override
            @SuppressWarnings("synthetic-access")
            public void run() {
                log.info("Shutdown hook shutting down semispace.");
                shutdownAndAwaitTermination();
            }
        };
        shutDownHook = new Thread( hook);
        Runtime.getRuntime().addShutdownHook(shutDownHook);
        //
        // Fire up connection
        //
        long last;
        long current = SemiSpace.ONE_DAY;
        int count = 0;
        // Perform query as long as the connection is improving
        do {
            count++;
            last = current;
            current = fireUpConnection();
        } while (current < last);
        log.info("Needed {} iterations in order to find the best time, which was {} ms.", Integer.valueOf(count),
                Long.valueOf(current));
        //
        // Figure out the ID of this space
        //
        spaceId = figureOutSpaceId();
        log.info("Space id was found to be {}", Integer.valueOf(spaceId));
        //
        // (Try to) find clock skew
        queryForMasterTime();
        periodicHarvest = new PeriodicHarvest(this);
        periodicHarvest.startReaper();
    }

    /**
     * Determines the id of this space as one more than the largest id seen
     * among other spaces, and assumes admin responsibility when no other
     * master announced itself.
     */
    private int figureOutSpaceId() {
        List<IdentifyAdminQuery> admins = new ArrayList<IdentifyAdminQuery>();
        IdentifyAdminQuery masterFound = populateListOfAllSpaces(admins);
        Collections.sort(admins, new IdentifyAdminQueryComparator());
        int foundId = 1;
        if (!admins.isEmpty()) {
            // Collection is sorted (descending), so the first element holds
            // the largest id; this space takes the next one.
            IdentifyAdminQuery admin = admins.get(0);
            if (admin.id != null) {
                foundId = admin.id.intValue() + 1;
            }
        }
        if (masterFound == null) {
            log.info("I am master, as no other master was identified.");
            assumeAdminResponsibility(! admins.isEmpty());
        }
        return foundId;
    }

    /**
     * Promotes this space to master, optionally announcing the local system
     * time so other spaces can adjust their clock skew.
     *
     * @param sendAdminInfoAboutSystemTime whether to broadcast a TimeAnswer
     */
    protected void assumeAdminResponsibility(boolean sendAdminInfoAboutSystemTime) {
        master = true;
        if (sendAdminInfoAboutSystemTime) {
            log.info("Informing other masters of system time.");
            TimeAnswer ta = new TimeAnswer();
            ta.masterId = getSpaceId();
            ta.timeFromMaster = Long.valueOf(System.currentTimeMillis());
            space.write(ta, 1000);
        }
    }

    /**
     * Protected as it is used every once in a while from periodic object reaper
     * @param admins List to fill with the admin processes found
     * @return List of identified SemiSpace admin classes
     */
    protected IdentifyAdminQuery populateListOfAllSpaces(List<IdentifyAdminQuery> admins) {
        IdentifyAdminQuery identifyAdmin = new IdentifyAdminQuery();
        identifyAdmin.hasAnswered = Boolean.FALSE;
        space.write(identifyAdmin, SemiSpace.ONE_DAY);
        IdentifyAdminQuery iaq = new IdentifyAdminQuery();
        iaq.hasAnswered = Boolean.TRUE;
        IdentifyAdminQuery masterFound = null;
        IdentifyAdminQuery answer = null;
        long waitFor = 750;
        do {
            answer = space.take(iaq, waitFor);
            // When the first answer has arrived, the others, if any, should come close behind.
            waitFor = 250;
            if (answer != null) {
                admins.add(answer);
                if (Boolean.TRUE.equals(answer.amIAdmin)) {
                    if (masterFound != null) {
                        log.error("More than one admin found, both {} and {}", masterFound.id, answer.id);
                    }
                    masterFound = answer;
                }
            }
            // Looping until we do not find any more admins
        } while (answer != null);
        while ( space.takeIfExists(new IdentifyAdminQuery()) != null) { // NOSONAR
            // Remove identity query from space as we do not need it anymore. If more than one present, we have a race condition (not likely)
        }
        return masterFound;
    }

    /**
     * The very first query may take some time (when using terracotta), and it is therefore prudent to kick start the
     * connection.
     *
     * @return Time it took in ms for an answer to be obtained.
     */
    private long fireUpConnection() {
        long bench = System.currentTimeMillis();
        NameValueQuery nvq = new NameValueQuery();
        nvq.name = "Internal admin query";
        nvq.value = "Dummy-value in order to be (quite) unique [" + bench + "]";
        space.write(nvq, SemiSpace.ONE_DAY);
        nvq = space.take(nvq, 1000);
        if (nvq == null) {
            throw new AssertionError("Unable to retrieve query which is designed to kickstart space.");
        }
        long timed = System.currentTimeMillis() - bench;
        return timed;
    }

    /**
     * Obtaining time by querying with internal query
     */
    private void queryForMasterTime() {
        TimeQuery tq = new TimeQuery();
        tq.isFinished = Boolean.FALSE;
        // Letting the query itself exist a day. This is as skew can be large.
        space.write(tq, SemiSpace.ONE_DAY);
        space.read(new TimeAnswer(), 2500);
        space.takeIfExists(tq);
    }

    /**
     * Dispatches an incoming internal query to the matching handler.
     */
    private void notifyAboutInternalQuery(InternalQuery incoming) {
        if (incoming instanceof TimeQuery) {
            answerTimeQuery((TimeQuery) incoming);
        } else if (incoming instanceof IdentifyAdminQuery) {
            answerIdentityQuery((IdentifyAdminQuery) incoming);
        } else if (incoming instanceof TimeAnswer) {
            treatIncomingTimeAnswer((TimeAnswer) incoming);
        } else {
            log.warn("Unknown internal query");
        }
    }

    /**
     * A (potentially new) admin process has given time answer. Adjust time accordingly.
     * When two masters exist, the one with the lower id keeps the role.
     */
    private void treatIncomingTimeAnswer(TimeAnswer incoming) {
        if (isMaster()) {
            if (incoming.masterId != getSpaceId()) {
                String adminfo = "Got more than one space that perceives it is admin space: " + incoming.masterId
                        + " and myself: " + getSpaceId();
                if (incoming.masterId < getSpaceId()) {
                    master = false;
                    adminfo += ". Removing this space as master.";
                } else {
                    adminfo += ". Keeping this space as master.";
                }
                log.warn(adminfo);
            } else {
                clockSkew = 0;
            }
        }
        // Need to test again as we may have been reset:
        if (!isMaster()) {
            long systime = System.currentTimeMillis();
            clockSkew = systime - incoming.timeFromMaster.longValue();
            log.info("Master has [{}], whereas I have [{}]. This gives a skew of {}.",
                    new Object[] { new Date(incoming.timeFromMaster.longValue()), new Date(systime),
                            Long.valueOf(clockSkew) });
        }
    }

    /**
     * Answers an identity query with this space's id and master status.
     * Ignored until an id has been assigned, and for already-answered queries.
     */
    private void answerIdentityQuery(IdentifyAdminQuery identify) {
        if (spaceId < 1) {
            return;
        }
        if (identify.hasAnswered != null && identify.hasAnswered.booleanValue()) {
            return;
        }
        IdentifyAdminQuery answer = new IdentifyAdminQuery();
        answer.amIAdmin = Boolean.valueOf(master);
        answer.hasAnswered = Boolean.TRUE;
        answer.id = Integer.valueOf(spaceId);
        log.debug("Giving identity answer for space {}, which is{} master.", Integer.valueOf(spaceId),
                master ? "" : " NOT");
        space.write(answer, SemiSpace.ONE_DAY);
    }

    /**
     * Answers a time query with the current system time, but only when this
     * space is master and the query is still unanswered.
     */
    private void answerTimeQuery(TimeQuery tq) {
        if (isMaster() && !tq.isFinished.booleanValue()) {
            TimeAnswer answer = new TimeAnswer();
            answer.timeFromMaster = Long.valueOf(System.currentTimeMillis());
            answer.masterId = getSpaceId();
            space.write(answer, 1000);
            log.info("Giving answer about time (which was found to be {}, which is {})", answer.timeFromMaster,
                    new Date(answer.timeFromMaster.longValue()));
        }
    }

    /**
     * Reacts to availability events for internal queries by reading the
     * holder from the space and dispatching the deserialized query.
     */
    @Override
    public void notifyAboutEvent(DistributedEvent event) {
        if (event.getEvent() instanceof SemiAvailabilityEvent) {
            if (InternalQuery.class.getName().equals(event.getHolderClassName()) && space instanceof SemiSpace ) {
                Holder holder = ((SemiSpace)space).readHolderById(event.getEvent().getId());
                if ( holder != null ) {
                    // NOTE(review): a new XStream instance is created per event;
                    // consider caching one if event volume is high.
                    notifyAboutInternalQuery((InternalQuery) new XStream().fromXML(holder.getXml()));
                }
            }
        }
    }

    /**
     * The cached thread pool has a timeout of a minute, so a shutdown is not immediate. This method will try to speed
     * up the process, but it is not mandatory to use it.
     * The method is protected for the benefit of subclasses.
     */
    protected void shutdownAndAwaitTermination() {
        if ( pool.isShutdown() && periodicHarvest.isCancelled()) {
            // Already had a shutdown notification.
            return;
        }
        periodicHarvest.cancelReaper();
        pool.shutdown(); // Disable new tasks from being submitted
        try {
            // Wait a while for existing tasks to terminate
            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
                pool.shutdownNow(); // Cancel currently executing tasks
                // Wait a while for tasks to respond to being cancelled
                if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                    log.warn("Pool did not terminate");
                }
            }
        } catch (InterruptedException ignored) {
            // (Re-)Cancel if current thread also interrupted
            pool.shutdownNow();
            // Preserve interrupt status
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Remove shutdown hook which otherwise is run when the space is shut down.
     * Primarily used when exchanging this admin with another.
     */
    public void removeShutDownHook() {
        periodicHarvest.cancelReaper();
        if ( shutDownHook != null ) {
            Runtime.getRuntime().removeShutdownHook(shutDownHook);
        }
    }

    /**
     * Orders identity answers by descending id; answers without an id sort last.
     */
    private static class IdentifyAdminQueryComparator implements Comparator<IdentifyAdminQuery>, Serializable {
        @Override
        public int compare(IdentifyAdminQuery a1, IdentifyAdminQuery a2) {
            if (a1.id == null) {
                return 1;
            } else if (a2.id == null) {
                return -1;
            }
            // Integer.compare avoids the overflow risk of plain subtraction.
            return Integer.compare(a2.id.intValue(), a1.id.intValue());
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.